Merge "Add ALPN protocol scenario tests"
diff --git a/octavia_tempest_plugin/common/constants.py b/octavia_tempest_plugin/common/constants.py
index 09582f2..1f0a738 100644
--- a/octavia_tempest_plugin/common/constants.py
+++ b/octavia_tempest_plugin/common/constants.py
@@ -57,6 +57,7 @@
 INSERT_HEADERS = 'insert_headers'
 X_FORWARDED_FOR = 'X-Forwarded-For'
 X_FORWARDED_PORT = 'X-Forwarded-Port'
+X_FORWARDED_PROTO = 'X-Forwarded-Proto'
 TAGS = 'tags'
 TIMEOUT_CLIENT_DATA = 'timeout_client_data'
 TIMEOUT_MEMBER_CONNECT = 'timeout_member_connect'
@@ -125,6 +126,7 @@
 # Protocols
 HTTP = 'HTTP'
 HTTPS = 'HTTPS'
+PROXY = 'PROXY'
 TCP = 'TCP'
 TERMINATED_HTTPS = 'TERMINATED_HTTPS'
 UDP = 'UDP'
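
Note: a member pool created with the new PROXY protocol constant receives the original client address as a short text preamble before any payload. A minimal sketch of the PROXY protocol v1 preamble, with illustrative addresses and ports (not taken from this patch):

    # PROXY protocol v1 preamble: "PROXY TCP4 <src> <dst> <sport> <dport>\r\n"
    src_ip, dst_ip = '203.0.113.5', '198.51.100.20'
    src_port, dst_port = 49152, 80
    preamble = 'PROXY TCP4 {} {} {} {}\r\n'.format(
        src_ip, dst_ip, src_port, dst_port)
    assert preamble == 'PROXY TCP4 203.0.113.5 198.51.100.20 49152 80\r\n'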
diff --git a/octavia_tempest_plugin/common/requests_adapters.py b/octavia_tempest_plugin/common/requests_adapters.py
new file mode 100644
index 0000000..e37fab3
--- /dev/null
+++ b/octavia_tempest_plugin/common/requests_adapters.py
@@ -0,0 +1,32 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import socket
+
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.connection import HTTPConnection
+from requests.packages.urllib3.poolmanager import PoolManager
+
+
+class SourcePortAdapter(HTTPAdapter):
+    """"Transport adapter" that allows us to set the source port."""
+    def __init__(self, port, *args, **kwargs):
+        self._source_port = port
+        super(SourcePortAdapter, self).__init__(*args, **kwargs)
+
+    def init_poolmanager(self, connections, maxsize, block=False):
+        # Make sure TIME_WAIT doesn't stop us from reusing the socket
+        sock_options = HTTPConnection.default_socket_options + [
+            (socket.SOL_SOCKET, socket.SO_REUSEADDR, 1), ]
+        self.poolmanager = PoolManager(
+            num_pools=connections, maxsize=maxsize,
+            block=block, source_address=('', self._source_port),
+            socket_options=sock_options)
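
Note: a hedged usage sketch for the adapter above; the session wiring and the VIP address are assumptions for illustration, not part of this patch:

    import requests

    # Pin the local source port for every request made through this session,
    # e.g. to drive SOURCE_IP_PORT balancing deterministically from a test.
    session = requests.Session()
    session.mount('http://', SourcePortAdapter(port=10800))
    # SO_REUSEADDR (set in init_poolmanager above) lets a later run rebind
    # the same port even while an earlier socket lingers in TIME_WAIT.
    response = session.get('http://192.0.2.10/')  # placeholder VIP address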
diff --git a/octavia_tempest_plugin/config.py b/octavia_tempest_plugin/config.py
index 601c6c8..77d2f6e 100644
--- a/octavia_tempest_plugin/config.py
+++ b/octavia_tempest_plugin/config.py
@@ -50,10 +50,10 @@
                choices=['public', 'admin', 'internal',
                         'publicURL', 'adminURL', 'internalURL'],
                help="The endpoint type to use for the load-balancer service"),
-    cfg.IntOpt('build_interval',
-               default=5,
-               help='Time in seconds between build status checks for '
-                    'non-load-balancer resources to build'),
+    cfg.FloatOpt('build_interval',
+                 default=5,
+                 help='Time in seconds between build status checks for '
+                      'non-load-balancer resources to build'),
     cfg.IntOpt('build_timeout',
                default=300,
                help='Timeout in seconds to wait for non-load-balancer '
@@ -62,9 +62,9 @@
                help='The service_auth username the Octavia services are using '
                     'to access other OpenStack services.'),
     # load-balancer specific options
-    cfg.IntOpt('check_interval',
-               default=5,
-               help='Interval to check for status changes.'),
+    cfg.FloatOpt('check_interval',
+                 default=5,
+                 help='Interval to check for status changes.'),
     cfg.IntOpt('check_timeout',
                default=120,
                help='Timeout, in seconds, to wait for a status change.'),
@@ -72,10 +72,10 @@
                 default=False,
                 help='Runs the tests assuming no-op drivers are being used. '
                      'Tests will assume no actual amphora are created.'),
-    cfg.IntOpt('lb_build_interval',
-               default=10,
-               help='Time in seconds between build status checks for a '
-                    'load balancer.'),
+    cfg.FloatOpt('lb_build_interval',
+                 default=10,
+                 help='Time in seconds between build status checks for a '
+                      'load balancer.'),
     cfg.IntOpt('lb_build_timeout',
                default=900,
                help='Timeout in seconds to wait for a '
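
Note: switching these options from IntOpt to FloatOpt allows sub-second polling intervals. A minimal standalone oslo.config sketch (only one of the patched options registered, for illustration):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_opts([cfg.FloatOpt('build_interval', default=5)],
                       group='load_balancer')
    # A FloatOpt accepts fractional seconds; the former IntOpt would have
    # rejected this override.
    conf.set_override('build_interval', 0.25, group='load_balancer')
    assert conf.load_balancer.build_interval == 0.25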
diff --git a/octavia_tempest_plugin/tests/api/v2/test_healthmonitor.py b/octavia_tempest_plugin/tests/api/v2/test_healthmonitor.py
index 5f4be23..d3f12ba 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_healthmonitor.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_healthmonitor.py
@@ -12,6 +12,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import testtools
 import time
 from uuid import UUID
 
@@ -32,13 +33,6 @@
 
 class HealthMonitorAPITest(test_base.LoadBalancerBaseTest):
     """Test the healthmonitor object API."""
-
-    @classmethod
-    def skip_checks(cls):
-        super(HealthMonitorAPITest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.health_monitor_enabled:
-            raise cls.skipException('Health Monitors not supported')
-
     @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
@@ -62,8 +56,152 @@
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
 
-    @decorators.idempotent_id('30288670-5772-40c2-92e6-6d4a6d62d029')
-    def test_healthmonitor_create(self):
+    @decorators.idempotent_id('bc3fc817-3368-4e1e-bb6d-52c4de3fb10c')
+    def test_LC_HTTP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('18fcd6b8-9fc3-4858-83f6-d8800052b655')
+    def test_LC_HTTPS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('5b0446f2-374e-4e74-9865-72e16a19c587')
+    def test_LC_PING_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('46c4e159-84d2-4876-9298-ac85561d3bd0')
+    def test_LC_TCP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('a15f7319-2d3b-4ec8-9d70-e77a55045145')
+    def test_LC_TLS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('f609b2c2-391d-4bc9-9793-9a4bc30ab00b')
+    def test_LC_UDP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('4f20473f-ab02-4426-8d15-cf34b3c72558')
+    def test_RR_HTTP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('abfff805-5491-4aef-9952-45b553cbebbb')
+    def test_RR_HTTPS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('ac64228e-dc4c-4116-b610-5783a85a87f1')
+    def test_RR_PING_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('e3ac7193-1dc9-41df-a4e2-7d40ca70a678')
+    def test_RR_TCP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('54952a9a-b3ef-4b36-a586-9adcd63dfc49')
+    def test_RR_TLS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('9af20b3c-fc42-4365-a4e9-cecbdddf90c0')
+    def test_RR_UDP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('35b076a4-dfb1-4557-9eac-a33982f73856')
+    def test_SI_HTTP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('965804cb-d6a1-4fdd-99dc-948829e0c046')
+    def test_SI_HTTPS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('eb86eab1-4cdf-40fc-b633-679d7fb64806')
+    def test_SI_PING_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('6bdab976-b6ad-4c83-87ab-0d184e80eb2c')
+    def test_SI_TCP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('4f8111dd-4f03-4740-ae4b-b13b731f45a0')
+    def test_SI_TLS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('d98365ca-56ba-450d-955f-d1c06c329960')
+    def test_SI_UDP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('acab47f5-3006-4e84-a55f-e9dfe33113d2')
+    def test_SIP_HTTP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('ca42a541-3280-4496-8604-9ce64e1992d6')
+    def test_SIP_HTTPS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('78c8e5fc-3ba0-44d0-ac4a-93a90fb59c3f')
+    def test_SIP_PING_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('d5b50306-a3bd-4293-96ed-17a2897d57cc')
+    def test_SIP_TCP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('204556df-185e-4305-b1b7-e8d345d645e4')
+    def test_SIP_TLS_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('2de53a00-c631-4985-abc1-89070ac2515f')
+    def test_SIP_UDP_healthmonitor_create(self):
+        self._test_healthmonitor_create(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    def _test_healthmonitor_create(self, pool_protocol, pool_algorithm,
+                                   hm_type):
         """Tests healthmonitor create and basic show APIs.
 
         * Create a clean pool to use for the healthmonitor.
@@ -73,15 +211,36 @@
         * Show healthmonitor details.
         * Validate the show reflects the requested values.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1_hm-create")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if pool_algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
@@ -93,19 +252,21 @@
                                 CONF.load_balancer.build_timeout)
 
         hm_name = data_utils.rand_name("lb_member_hm1-create")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
         hm_kwargs = {
             const.POOL_ID: pool[const.ID],
             const.NAME: hm_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
             const.MAX_RETRIES: 4,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: True,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_kwargs.update({const.HTTP_METHOD: const.GET,
+                              const.URL_PATH: '/',
+                              const.EXPECTED_CODES: '200-204'})
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -149,8 +310,10 @@
 
         equal_items = [const.NAME, const.TYPE, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         for item in equal_items:
             self.assertEqual(hm_kwargs[item], hm[item])
@@ -167,8 +330,151 @@
     def _filter_hms_by_index(self, hms, indexes):
         return [hm for i, hm in enumerate(hms) if i not in indexes]
 
+    @decorators.idempotent_id('9e4b1298-b6a0-46c7-b8e4-afcd31f904d3')
+    def test_LC_HTTP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTP,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('7488b1e1-12b5-4a42-9d78-9e08060ec7b1')
+    def test_LC_HTTPS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTPS,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('1f93483c-29ae-4ac1-a588-9ce9bd837232')
+    def test_LC_PING_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('49ccd27c-3a4c-41e5-bcb0-d4f03ecc3e79')
+    def test_LC_TCP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('60a254dc-4764-45f2-a183-8102063462e0')
+    def test_LC_TLS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('6637d37d-76aa-455a-ba73-8f1a12edcedd')
+    def test_LC_UDP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.UDP,
+                                      const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                      const.HEALTH_MONITOR_UDP_CONNECT)
+
     @decorators.idempotent_id('c9a9f20c-3680-4ae8-b657-33c687258fea')
-    def test_healthmonitor_list(self):
+    def test_RR_HTTP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTP,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('93c0d03a-eb32-457b-a5af-96c03a891c06')
+    def test_RR_HTTPS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTPS,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('2ef2944f-dd56-40a5-9100-4e1b86c623af')
+    def test_RR_PING_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('5912534f-20b3-45b7-9907-9247bf05cd13')
+    def test_RR_TCP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('5d169fe7-16f3-4f70-8b1e-72aeeec4fd61')
+    def test_RR_TLS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('3f81050e-4218-46fa-8d85-09807b8cdded')
+    def test_RR_UDP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.UDP,
+                                      const.LB_ALGORITHM_ROUND_ROBIN,
+                                      const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('2df98839-7b2a-46c4-9da7-34e3d1c33851')
+    def test_SI_HTTP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTP,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('1ce28897-917c-4e7a-89bb-cc2aabd67037')
+    def test_SI_HTTPS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTPS,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('e8418eaa-73a5-4d56-8ca5-314dd2141dc9')
+    def test_SI_PING_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('25203919-a039-43a4-84db-15279cbd2ec7')
+    def test_SI_TCP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('4f84c05e-d4a7-4998-98cd-bc74024309f4')
+    def test_SI_TLS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('4cb10c86-a875-4a9e-be8f-c0afc8aa5633')
+    def test_SI_UDP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.UDP,
+                                      const.LB_ALGORITHM_SOURCE_IP,
+                                      const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('45bdd757-3132-4ede-8584-c46bc2f8f19e')
+    def test_SIP_HTTP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTP,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('300db789-b231-45be-863d-f4d6116660d3')
+    def test_SIP_HTTPS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.HTTPS,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('a1d534ba-ada9-4d7c-8e17-6e520a27c110')
+    def test_SIP_PING_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('12c29b89-bbbc-46b0-89c5-beb42fc52181')
+    def test_SIP_TCP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('a3b01b12-f4cb-4b2a-9f62-af24834ce19b')
+    def test_SIP_TLS_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.TCP,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('571e139b-08ae-4e8c-b25c-0e0bb9d198af')
+    def test_SIP_UDP_healthmonitor_list(self):
+        self._test_healthmonitor_list(const.UDP,
+                                      const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                      const.HEALTH_MONITOR_UDP_CONNECT)
+
+    def _test_healthmonitor_list(self, pool_protocol, pool_algorithm,
+                                 hm_type):
         """Tests healthmonitor list API and field filtering.
 
         * Create three clean pools to use for the healthmonitors.
@@ -182,6 +488,13 @@
         * List the healthmonitors filtering to one of the three.
         * List the healthmonitors filtered, one field, and sorted.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         # Get a list of pre-existing HMs to filter from test data
         pretest_hms = self.mem_healthmonitor_client.list_healthmonitors()
         # Store their IDs for easy access
@@ -190,12 +503,26 @@
         pool1_name = data_utils.rand_name("lb_member_pool1_hm-list")
         pool1_kwargs = {
             const.NAME: pool1_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
-        pool1 = self.mem_pool_client.create_pool(**pool1_kwargs)
+        # This is a special case as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool1 = self.mem_pool_client.create_pool(**pool1_kwargs)
+        except exceptions.NotImplemented as e:
+            if pool_algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         pool1_id = pool1[const.ID]
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool1_id,
@@ -210,8 +537,8 @@
         pool2_name = data_utils.rand_name("lb_member_pool2_hm-list")
         pool2_kwargs = {
             const.NAME: pool2_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
@@ -230,8 +557,8 @@
         pool3_name = data_utils.rand_name("lb_member_pool3_hm-list")
         pool3_kwargs = {
             const.NAME: pool3_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
@@ -248,19 +575,21 @@
                                 CONF.load_balancer.build_timeout)
 
         hm1_name = data_utils.rand_name("lb_member_hm2-list")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
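+        # MAX_RETRIES gets a distinct value per monitor (2, 1, 3) so the
+        # non-HTTP monitor types have a unique field to sort and compare
+        # on below; HTTP monitors use URL_PATH for that instead.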
         hm1_kwargs = {
             const.POOL_ID: pool1_id,
             const.NAME: hm1_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
-            const.MAX_RETRIES: 4,
+            const.MAX_RETRIES: 2,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/B',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: True,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm1_kwargs.update({const.HTTP_METHOD: const.GET,
+                               const.URL_PATH: '/B',
+                               const.EXPECTED_CODES: '200-204'})
 
         if self.mem_healthmonitor_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -294,16 +623,17 @@
         hm2_kwargs = {
             const.POOL_ID: pool2_id,
             const.NAME: hm2_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
-            const.MAX_RETRIES: 4,
+            const.MAX_RETRIES: 1,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/A',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: True,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm2_kwargs.update({const.HTTP_METHOD: const.GET,
+                               const.URL_PATH: '/A',
+                               const.EXPECTED_CODES: '200-204'})
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -337,16 +667,17 @@
         hm3_kwargs = {
             const.POOL_ID: pool3_id,
             const.NAME: hm3_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
-            const.MAX_RETRIES: 4,
+            const.MAX_RETRIES: 3,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/C',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: False,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm3_kwargs.update({const.HTTP_METHOD: const.GET,
+                               const.URL_PATH: '/C',
+                               const.EXPECTED_CODES: '200-204'})
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -388,38 +719,78 @@
         # Check the default sort order, created_at
         hms = self.mem_healthmonitor_client.list_healthmonitors()
         hms = self._filter_hms_by_pool_id(hms, (pool1_id, pool2_id, pool3_id))
-        self.assertEqual(hm1[const.URL_PATH],
-                         hms[0][const.URL_PATH])
-        self.assertEqual(hm2[const.URL_PATH],
-                         hms[1][const.URL_PATH])
-        self.assertEqual(hm3[const.URL_PATH],
-                         hms[2][const.URL_PATH])
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            self.assertEqual(hm1[const.URL_PATH],
+                             hms[0][const.URL_PATH])
+            self.assertEqual(hm2[const.URL_PATH],
+                             hms[1][const.URL_PATH])
+            self.assertEqual(hm3[const.URL_PATH],
+                             hms[2][const.URL_PATH])
+        else:
+            self.assertEqual(hm1[const.MAX_RETRIES],
+                             hms[0][const.MAX_RETRIES])
+            self.assertEqual(hm2[const.MAX_RETRIES],
+                             hms[1][const.MAX_RETRIES])
+            self.assertEqual(hm3[const.MAX_RETRIES],
+                             hms[2][const.MAX_RETRIES])
 
         # Test sort descending by URL path (HTTP) or max retries
-        hms = self.mem_healthmonitor_client.list_healthmonitors(
-            query_params='{sort}={url_path}:{desc}'
-                         .format(sort=const.SORT,
-                                 url_path=const.URL_PATH, desc=const.DESC))
-        hms = self._filter_hms_by_pool_id(hms, (pool1_id, pool2_id, pool3_id))
-        self.assertEqual(hm1[const.URL_PATH],
-                         hms[1][const.URL_PATH])
-        self.assertEqual(hm2[const.URL_PATH],
-                         hms[2][const.URL_PATH])
-        self.assertEqual(hm3[const.URL_PATH],
-                         hms[0][const.URL_PATH])
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hms = self.mem_healthmonitor_client.list_healthmonitors(
+                query_params='{sort}={url_path}:{desc}'
+                             .format(sort=const.SORT,
+                                     url_path=const.URL_PATH, desc=const.DESC))
+            hms = self._filter_hms_by_pool_id(hms,
+                                              (pool1_id, pool2_id, pool3_id))
+            self.assertEqual(hm1[const.URL_PATH],
+                             hms[1][const.URL_PATH])
+            self.assertEqual(hm2[const.URL_PATH],
+                             hms[2][const.URL_PATH])
+            self.assertEqual(hm3[const.URL_PATH],
+                             hms[0][const.URL_PATH])
+        else:
+            hms = self.mem_healthmonitor_client.list_healthmonitors(
+                query_params='{sort}={max_retries}:{desc}'
+                             .format(sort=const.SORT,
+                                     max_retries=const.MAX_RETRIES,
+                                     desc=const.DESC))
+            hms = self._filter_hms_by_pool_id(hms,
+                                              (pool1_id, pool2_id, pool3_id))
+            self.assertEqual(hm1[const.MAX_RETRIES],
+                             hms[1][const.MAX_RETRIES])
+            self.assertEqual(hm2[const.MAX_RETRIES],
+                             hms[2][const.MAX_RETRIES])
+            self.assertEqual(hm3[const.MAX_RETRIES],
+                             hms[0][const.MAX_RETRIES])
 
         # Test sort ascending by URL path (HTTP) or max retries
-        hms = self.mem_healthmonitor_client.list_healthmonitors(
-            query_params='{sort}={url_path}:{asc}'
-                         .format(sort=const.SORT,
-                                 url_path=const.URL_PATH, asc=const.ASC))
-        hms = self._filter_hms_by_pool_id(hms, (pool1_id, pool2_id, pool3_id))
-        self.assertEqual(hm1[const.URL_PATH],
-                         hms[1][const.URL_PATH])
-        self.assertEqual(hm2[const.URL_PATH],
-                         hms[0][const.URL_PATH])
-        self.assertEqual(hm3[const.URL_PATH],
-                         hms[2][const.URL_PATH])
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hms = self.mem_healthmonitor_client.list_healthmonitors(
+                query_params='{sort}={url_path}:{asc}'
+                             .format(sort=const.SORT,
+                                     url_path=const.URL_PATH, asc=const.ASC))
+            hms = self._filter_hms_by_pool_id(hms,
+                                              (pool1_id, pool2_id, pool3_id))
+            self.assertEqual(hm1[const.URL_PATH],
+                             hms[1][const.URL_PATH])
+            self.assertEqual(hm2[const.URL_PATH],
+                             hms[0][const.URL_PATH])
+            self.assertEqual(hm3[const.URL_PATH],
+                             hms[2][const.URL_PATH])
+        else:
+            hms = self.mem_healthmonitor_client.list_healthmonitors(
+                query_params='{sort}={max_retries}:{asc}'
+                             .format(sort=const.SORT,
+                                     max_retries=const.MAX_RETRIES,
+                                     asc=const.ASC))
+            hms = self._filter_hms_by_pool_id(hms,
+                                              (pool1_id, pool2_id, pool3_id))
+            self.assertEqual(hm1[const.MAX_RETRIES],
+                             hms[1][const.MAX_RETRIES])
+            self.assertEqual(hm2[const.MAX_RETRIES],
+                             hms[0][const.MAX_RETRIES])
+            self.assertEqual(hm3[const.MAX_RETRIES],
+                             hms[2][const.MAX_RETRIES])
 
         # Determine indexes of pretest HMs in default sort
         pretest_hm_indexes = []
@@ -509,8 +880,151 @@
             self.assertTrue(not any(["" in hm[const.TAGS]
                                      for hm in list_of_hms]))
 
+    @decorators.idempotent_id('358afb0b-6259-46be-a0b3-b11e5e202624')
+    def test_LC_HTTP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('c2476eea-8ae1-40ed-be55-0125f9399bd4')
+    def test_LC_HTTPS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('1489251c-0704-4735-bf62-801b5277c5c9')
+    def test_LC_PING_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('c761f90e-7b9c-400b-b540-e7c14f65d0a8')
+    def test_LC_TCP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('8ed512d7-9fd7-4932-bf5f-090498b384bb')
+    def test_LC_TLS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('a3a9be7e-83d6-42cb-b603-f14a464b8268')
+    def test_LC_UDP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
     @decorators.idempotent_id('284e8d3b-7b2d-4697-9e41-580b3423c0b4')
-    def test_healthmonitor_show(self):
+    def test_RR_HTTP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('eeb4967b-ce46-4717-a750-3e740223a804')
+    def test_RR_HTTPS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('799ada1b-c082-42c5-b6ea-477f10fc88ce')
+    def test_RR_PING_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('d2eae7f4-43b4-4696-93ed-a30f95c978fe')
+    def test_RR_TCP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('5ce6eecc-d425-47cd-809f-aab5c56e1a9d')
+    def test_RR_TLS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('a6a46e6a-a063-46bf-972c-86d0305fb766')
+    def test_RR_UDP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('54d693ba-1ba4-4388-b020-c29dc3184522')
+    def test_SI_HTTP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('2638718e-b345-4868-b527-9bed575e27d6')
+    def test_SI_HTTPS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('124a5ca1-5eae-4ed0-8528-7a499e9ad7a2')
+    def test_SI_PING_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('098ae671-1791-455a-a5a8-ada8c592a2dd')
+    def test_SI_TCP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('eef8f780-b557-447f-9f61-b1f3e6daec77')
+    def test_SI_TLS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('9b6d9c45-4696-4f6a-8816-594b03e3ee5b')
+    def test_SI_UDP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('5653ea25-c7e0-4c72-8b2a-19dd97dd5a69')
+    def test_SIP_HTTP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('fff4472d-b4de-4b8b-9748-476ffc7c8e13')
+    def test_SIP_HTTPS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('1af98ebe-3f3c-4e5f-8f72-ecbd9b25c69f')
+    def test_SIP_PING_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('40169a7d-84ac-4362-b8d7-64b9b807ce7e')
+    def test_SIP_TCP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('15146c2e-e1c1-48ac-a7d8-3a1b4de590b2')
+    def test_SIP_TLS_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('008042da-3734-4bbb-b8b2-f4ad9e2dab21')
+    def test_SIP_UDP_healthmonitor_show(self):
+        self._test_healthmonitor_show(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    def _test_healthmonitor_show(self, pool_protocol, pool_algorithm, hm_type):
         """Tests healthmonitor show API.
 
         * Create a clean pool to use for the healthmonitor.
@@ -519,15 +1033,36 @@
         * Validate the show reflects the requested values.
         * Validates that other accounts cannot see the healthmonitor.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1_hm-show")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if pool_algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
@@ -539,19 +1074,21 @@
                                 CONF.load_balancer.build_timeout)
 
         hm_name = data_utils.rand_name("lb_member_hm1-show")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
         hm_kwargs = {
             const.POOL_ID: pool[const.ID],
             const.NAME: hm_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
             const.MAX_RETRIES: 4,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: True,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_kwargs.update({const.HTTP_METHOD: const.GET,
+                              const.URL_PATH: '/',
+                              const.EXPECTED_CODES: '200-204'})
 
         hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
         self.addCleanup(
@@ -580,8 +1117,10 @@
 
         equal_items = [const.NAME, const.TYPE, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         for item in equal_items:
             self.assertEqual(hm_kwargs[item], hm[item])
@@ -614,8 +1153,152 @@
                 self.os_primary.healthmonitor_client.show_healthmonitor,
                 hm[const.ID])
 
+    @decorators.idempotent_id('2417164b-ec03-4488-afd2-60b096dc0077')
+    def test_LC_HTTP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('5d88aa1e-2db9-43f8-bb9b-4673c2060835')
+    def test_LC_HTTPS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('61908142-9768-44aa-9a4b-b3904560a0dc')
+    def test_LC_PING_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('649bdfa3-1009-4f88-bc92-c3e3141c493e')
+    def test_LC_TCP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('365fffd3-0817-4907-aab1-7da60736ba60')
+    def test_LC_TLS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('df29f696-a321-4626-acb2-6f66105e1661')
+    def test_LC_UDP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
     @decorators.idempotent_id('fa584b2c-f179-4c4e-ad2e-ff51fd1c5973')
-    def test_healthmonitor_update(self):
+    def test_RR_HTTP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('b927836a-2770-46ff-92de-3031c5240da6')
+    def test_RR_HTTPS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('cdd559b1-5c7b-492f-9c8d-c1da6e8d7b3b')
+    def test_RR_PING_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('1b9c310e-cffe-4f6a-b1af-021f751fc2a9')
+    def test_RR_TCP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('4b3c9a67-f884-43a3-8f42-bac68be7060b')
+    def test_RR_TLS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('805e2976-962a-4bb0-a9cc-97270a42c376')
+    def test_RR_UDP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('654e4ce3-b579-4595-b1a1-6762f64b2408')
+    def test_SI_HTTP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('879ef60a-4621-45aa-a520-b57da3b1fddc')
+    def test_SI_HTTPS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('66e83157-53c3-4eac-a7f0-e3dc4f51de06')
+    def test_SI_PING_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('ce475c4c-d01a-4cde-be71-555c84f2b8da')
+    def test_SI_TCP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('b292478f-5c26-462e-b222-103be3b115d3')
+    def test_SI_TLS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('49ccc552-752b-4f84-9900-65908cb13add')
+    def test_SI_UDP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('d141d8d1-fa12-49cb-9d6d-413998aa2dc5')
+    def test_SIP_HTTP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('9a1bbbbb-c640-48cb-bd1a-e3d3fd2602af')
+    def test_SIP_HTTPS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('3f573e31-49b9-45d0-bb00-8483f48ae422')
+    def test_SIP_PING_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('1a6922fd-9e8c-4836-9a6a-087f09249a49')
+    def test_SIP_TCP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('e9468e81-dbff-4e88-8d4b-e2a54835c2d8')
+    def test_SIP_TLS_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('64379cb2-b789-4fe6-ae3a-e62b907c6365')
+    def test_SIP_UDP_healthmonitor_update(self):
+        self._test_healthmonitor_update(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    def _test_healthmonitor_update(self, pool_protocol, pool_algorithm,
+                                   hm_type):
         """Tests healthmonitor update and show APIs.
 
         * Create a clean pool to use for the healthmonitor.
@@ -627,15 +1310,36 @@
         * Show healthmonitor details.
         * Validate the show reflects the updated values.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1_hm-update")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if pool_algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
@@ -647,19 +1351,21 @@
                                 CONF.load_balancer.build_timeout)
 
         hm_name = data_utils.rand_name("lb_member_hm1-update")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
         hm_kwargs = {
             const.POOL_ID: pool[const.ID],
             const.NAME: hm_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
             const.MAX_RETRIES: 4,
             const.MAX_RETRIES_DOWN: 5,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/',
-            const.EXPECTED_CODES: '200-204',
             const.ADMIN_STATE_UP: False,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_kwargs.update({const.HTTP_METHOD: const.GET,
+                              const.URL_PATH: '/',
+                              const.EXPECTED_CODES: '200-204'})
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -698,8 +1404,10 @@
 
         equal_items = [const.NAME, const.TYPE, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         for item in equal_items:
             self.assertEqual(hm_kwargs[item], hm[item])
@@ -745,11 +1453,12 @@
             const.TIMEOUT: hm_kwargs[const.TIMEOUT] + 1,
             const.MAX_RETRIES: hm_kwargs[const.MAX_RETRIES] + 1,
             const.MAX_RETRIES_DOWN: hm_kwargs[const.MAX_RETRIES_DOWN] + 1,
-            const.HTTP_METHOD: const.POST,
-            const.URL_PATH: '/test',
-            const.EXPECTED_CODES: '201,202',
             const.ADMIN_STATE_UP: not hm_kwargs[const.ADMIN_STATE_UP],
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_update_kwargs.update({const.HTTP_METHOD: const.POST,
+                                     const.URL_PATH: '/test',
+                                     const.EXPECTED_CODES: '201,202'})
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -782,8 +1491,10 @@
         # Test changed items
         equal_items = [const.NAME, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -797,8 +1508,152 @@
         for item in equal_items:
             self.assertEqual(hm_kwargs[item], hm[item])
 
+    @decorators.idempotent_id('76b3d116-0190-4de8-a58e-8e450a46a621')
+    def test_LC_HTTP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('8e277e7f-49ea-4fcf-98e6-12566cc33846')
+    def test_LC_HTTPS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('c6646a25-b46d-4541-82de-75ee2beef052')
+    def test_LC_PING_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('5cfacffe-63f2-4aa3-856a-9fa3dafa2d33')
+    def test_LC_TCP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('a4055e48-7740-4ff5-b6e2-9e69b1d40dce')
+    def test_LC_TLS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('369bd443-54ec-4071-a279-5ac1ed38c52d')
+    def test_LC_UDP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
     @decorators.idempotent_id('a7bab4ac-340c-4776-ab9d-9fcb66869432')
-    def test_healthmonitor_delete(self):
+    def test_RR_HTTP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('2991664a-9138-4c10-8e30-2cb6a82bb5b4')
+    def test_RR_HTTPS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('31f7c2c2-9174-4538-8dce-35128bc47ce7')
+    def test_RR_PING_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('56e0cd28-3e74-498c-b55b-21078a758d1f')
+    def test_RR_TCP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('e22a02e2-411a-46d5-9a3a-20ff37cbc835')
+    def test_RR_TLS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('9221a59f-3f0f-41e8-b65c-cdbcca1a2eca')
+    def test_RR_UDP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('717e447f-d5c8-485a-923b-da83e560273b')
+    def test_SI_HTTP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('2e177a95-4ec8-4847-bd35-706b6452406a')
+    def test_SI_HTTPS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('17127700-dc5f-4546-a6e6-c0b851704836')
+    def test_SI_PING_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('0b1699c2-ff3e-47b6-a1ad-7128465d1233')
+    def test_SI_TCP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('4ad99064-9015-40b3-8d5b-6cc99e2cc8b9')
+    def test_SI_TLS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('a17825ff-e774-4230-9c53-b53bfc355d61')
+    def test_SI_UDP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('142022cc-9be3-4695-9acf-a7576e4b3268')
+    def test_SIP_HTTP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('dfd994b2-511b-433f-95f7-0786f1857090')
+    def test_SIP_HTTPS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('0d08ffc0-6e6a-470d-abed-5c101a828401')
+    def test_SIP_PING_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('ccf4a10c-2e72-4dbf-bc2f-134156eac3e2')
+    def test_SIP_TCP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('aaacc11e-98cd-4322-a7db-7c720eafd2b2')
+    def test_SIP_TLS_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('559eccf4-eb7c-4d23-9dc8-741cc1601fc7')
+    def test_SIP_UDP_healthmonitor_delete(self):
+        self._test_healthmonitor_delete(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    def _test_healthmonitor_delete(self, pool_protocol, pool_algorithm,
+                                   hm_type):
         """Tests healthmonitor create and delete APIs.
 
         * Create a clean pool to use for the healthmonitor.
@@ -807,15 +1662,36 @@
         * Deletes the healthmonitor.
         * Validates the healthmonitor is in the DELETED state.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1_hm-delete")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case: the reference driver does not support
+        # SOURCE_IP_PORT. Since it runs with not_implemented_is_error, we
+        # must handle this case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if pool_algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
@@ -827,11 +1703,12 @@
                                 CONF.load_balancer.build_timeout)
 
         hm_name = data_utils.rand_name("lb_member_hm1-delete")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
         hm_kwargs = {
             const.POOL_ID: pool[const.ID],
             const.NAME: hm_name,
-            const.TYPE: const.HEALTH_MONITOR_TCP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 3,
             const.MAX_RETRIES: 4,
         }
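
The NotImplemented-to-skip pattern above recurs throughout this change. Here is a minimal standalone sketch of the idea, using plain unittest and a stand-in create_pool rather than the real tempest clients, so every name in it is illustrative:

```python
import unittest


class FakeNotImplemented(Exception):
    """Stand-in for tempest.lib.exceptions.NotImplemented."""
    def __init__(self, resp_body=None):
        super(FakeNotImplemented, self).__init__()
        self.resp_body = resp_body or {}


def create_pool(algorithm):
    # Pretend the provider driver rejects SOURCE_IP_PORT only.
    if algorithm == 'SOURCE_IP_PORT':
        raise FakeNotImplemented(
            resp_body={'faultstring': 'SOURCE_IP_PORT is not supported'})
    return {'id': 'fake-pool-id'}


class PoolAlgorithmTest(unittest.TestCase):
    def _create_pool_or_skip(self, algorithm):
        try:
            return create_pool(algorithm)
        except FakeNotImplemented as e:
            if algorithm != 'SOURCE_IP_PORT':
                raise  # anything else is a real failure
            message = 'Driver does not support this feature.'
            if hasattr(e, 'resp_body'):
                message = e.resp_body.get('faultstring', message)
            raise unittest.SkipTest(message)

    def test_source_ip_port_pool(self):
        # Skips instead of failing when the driver lacks SOURCE_IP_PORT.
        pool = self._create_pool_or_skip('SOURCE_IP_PORT')
        self.assertIn('id', pool)


if __name__ == '__main__':
    unittest.main()
```

Raising unittest.SkipTest here plays the role that testtools.TestCase.skipException plays in the change above.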
diff --git a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
index 419a20f..a02f52e 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
@@ -30,16 +30,6 @@
 
 class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
     """Test the l7policy object API."""
-
-    @classmethod
-    def skip_checks(cls):
-        super(L7PolicyAPITest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.l7_protocol_enabled:
-            raise cls.skipException(
-                '[loadbalancer-feature-enabled] '
-                '"l7_protocol_enabled" is set to False in the Tempest '
-                'configuration. L7 API tests will be skipped.')
-
     @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
@@ -87,7 +77,7 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
             const.LOADBALANCER_ID: cls.lb_id,
         }
 
diff --git a/octavia_tempest_plugin/tests/api/v2/test_l7rule.py b/octavia_tempest_plugin/tests/api/v2/test_l7rule.py
index c2526bf..e44c9f8 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_l7rule.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_l7rule.py
@@ -31,15 +31,6 @@
 class L7RuleAPITest(test_base.LoadBalancerBaseTest):
     """Test the l7rule object API."""
     @classmethod
-    def skip_checks(cls):
-        super(L7RuleAPITest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.l7_protocol_enabled:
-            raise cls.skipException(
-                '[loadbalancer-feature-enabled] '
-                '"l7_protocol_enabled" is set to False in the Tempest '
-                'configuration. L7 API tests will be skipped.')
-
-    @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
         super(L7RuleAPITest, cls).resource_setup()
@@ -86,7 +77,7 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
             const.LISTENER_ID: cls.listener_id,
         }
 
diff --git a/octavia_tempest_plugin/tests/api/v2/test_listener.py b/octavia_tempest_plugin/tests/api/v2/test_listener.py
index db98958..394e4d4 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_listener.py
@@ -42,10 +42,6 @@
                      const.NAME: lb_name}
 
         cls._setup_lb_network_kwargs(lb_kwargs)
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
 
         lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
         cls.lb_id = lb[const.ID]
@@ -64,7 +60,27 @@
             cls.allowed_cidrs = ['2001:db8:a0b:12f0::/64']
 
     @decorators.idempotent_id('88d0ec83-7b08-48d9-96e2-0df1d2f8cd98')
-    def test_listener_create(self):
+    def test_http_listener_create(self):
+        self._test_listener_create(const.HTTP, 8000)
+
+    @decorators.idempotent_id('2cc89237-fc6b-434d-b38e-b3309823e71f')
+    def test_https_listener_create(self):
+        self._test_listener_create(const.HTTPS, 8001)
+
+    @decorators.idempotent_id('45580065-5653-436b-aaff-dc465fa0a542')
+    def test_tcp_listener_create(self):
+        self._test_listener_create(const.TCP, 8002)
+
+    @decorators.idempotent_id('7b53f336-47bc-45ae-bbd7-4342ef0673fc')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_create(self):
+        self._test_listener_create(const.UDP, 8003)
+
+    def _test_listener_create(self, protocol, protocol_port):
         """Tests listener create and basic show APIs.
 
         * Tests that users without the loadbalancer member role cannot
@@ -80,14 +96,10 @@
             const.NAME: listener_name,
             const.DESCRIPTION: listener_description,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 80,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "true",
-                const.X_FORWARDED_PORT: "true"
-            },
             # Don't test with a default pool -- we'll do that in the scenario,
             # but this will allow us to test that the field isn't mandatory,
             # as well as not conflate pool failures with listener test failures
@@ -97,6 +109,12 @@
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+        if protocol == const.HTTP:
+            listener_kwargs[const.INSERT_HEADERS] = {
+                const.X_FORWARDED_FOR: "true",
+                const.X_FORWARDED_PORT: "true",
+                const.X_FORWARDED_PROTO: "true",
+            }
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             listener_kwargs.update({
@@ -182,11 +200,14 @@
         else:
             self.assertEqual(const.ONLINE, listener[const.OPERATING_STATUS])
 
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
+        if protocol == const.HTTP:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PROTO]))
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.5'):
@@ -198,7 +219,27 @@
             self.assertEqual(self.allowed_cidrs, listener[const.ALLOWED_CIDRS])
 
     @decorators.idempotent_id('cceac303-4db5-4d5a-9f6e-ff33780a5f29')
-    def test_listener_create_on_same_port(self):
+    def test_http_udp_tcp_listener_create_on_same_port(self):
+        self._test_listener_create_on_same_port(const.HTTP, const.UDP,
+                                                const.TCP, 8010)
+
+    @decorators.idempotent_id('930338b8-3029-48a6-89b2-8b062060fe61')
+    def test_http_udp_https_listener_create_on_same_port(self):
+        self._test_listener_create_on_same_port(const.HTTP, const.UDP,
+                                                const.HTTPS, 8011)
+
+    @decorators.idempotent_id('01a21892-008a-4327-b4fd-fbf194ecb1a5')
+    def test_tcp_udp_http_listener_create_on_same_port(self):
+        self._test_listener_create_on_same_port(const.TCP, const.UDP,
+                                                const.HTTP, 8012)
+
+    @decorators.idempotent_id('5da764a4-c03a-46ed-848b-98b9d9fa9089')
+    def test_tcp_udp_https_listener_create_on_same_port(self):
+        self._test_listener_create_on_same_port(const.TCP, const.UDP,
+                                                const.HTTPS, 8013)
+
+    def _test_listener_create_on_same_port(self, protocol1, protocol2,
+                                           protocol3, protocol_port):
         """Tests listener creation on same port number.
 
         * Create a first listener.
@@ -224,8 +265,8 @@
         listener_kwargs = {
             const.NAME: listener_name,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 8080,
+            const.PROTOCOL: protocol1,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200
         }
@@ -251,19 +292,14 @@
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout)
 
-        if self.protocol == const.UDP:
-            protocol = const.TCP
-        else:
-            protocol = const.UDP
-
         # Create a listener on the same port, but with a different protocol
         listener2_name = data_utils.rand_name("lb_member_listener2-create")
 
         listener2_kwargs = {
             const.NAME: listener2_name,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: protocol,
-            const.PROTOCOL_PORT: 8080,
+            const.PROTOCOL: protocol2,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
         }
@@ -295,8 +331,8 @@
         listener3_kwargs = {
             const.NAME: listener3_name,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: protocol,
-            const.PROTOCOL_PORT: 8080,
+            const.PROTOCOL: protocol1,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
         }
@@ -306,33 +342,45 @@
             self.mem_listener_client.create_listener,
             **listener3_kwargs)
 
-        # Create a listener on the same port, with another protocol over TCP,
-        # only if layer-7 protocols are enabled
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if lb_feature_enabled.l7_protocol_enabled:
-            if self.protocol == const.HTTP:
-                protocol = const.HTTPS
-            else:
-                protocol = const.HTTP
+        # Create a listener on the same port, with another TCP-based protocol
+        listener4_name = data_utils.rand_name("lb_member_listener4-create")
 
-            listener4_name = data_utils.rand_name("lb_member_listener4-create")
+        listener4_kwargs = {
+            const.NAME: listener4_name,
+            const.ADMIN_STATE_UP: True,
+            const.PROTOCOL: protocol3,
+            const.PROTOCOL_PORT: protocol_port,
+            const.LOADBALANCER_ID: self.lb_id,
+            const.CONNECTION_LIMIT: 200,
+        }
 
-            listener4_kwargs = {
-                const.NAME: listener4_name,
-                const.ADMIN_STATE_UP: True,
-                const.PROTOCOL: protocol,
-                const.PROTOCOL_PORT: 8080,
-                const.LOADBALANCER_ID: self.lb_id,
-                const.CONNECTION_LIMIT: 200,
-            }
-
-            self.assertRaises(
-                exceptions.Conflict,
-                self.mem_listener_client.create_listener,
-                **listener4_kwargs)
+        self.assertRaises(
+            exceptions.Conflict,
+            self.mem_listener_client.create_listener,
+            **listener4_kwargs)
 
     @decorators.idempotent_id('78ba6eb0-178c-477e-9156-b6775ca7b271')
-    def test_listener_list(self):
+    def test_http_listener_list(self):
+        self._test_listener_list(const.HTTP, 8020)
+
+    @decorators.idempotent_id('61b7c643-f5fa-4471-8f9e-2e0ccdaf5ac7')
+    def test_https_listener_list(self):
+        self._test_listener_list(const.HTTPS, 8030)
+
+    @decorators.idempotent_id('1cd476e2-7788-415e-bcaf-c377acfc9794')
+    def test_tcp_listener_list(self):
+        self._test_listener_list(const.TCP, 8030)
+
+    @decorators.idempotent_id('c08fb77e-b317-4d6f-b430-91f5b27ebac6')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_list(self):
+        self._test_listener_list(const.UDP, 8040)
+
+    def _test_listener_list(self, protocol, protocol_port_base):
         """Tests listener list API and field filtering.
 
         * Create a clean loadbalancer.
@@ -368,8 +416,8 @@
             const.NAME: listener1_name,
             const.DESCRIPTION: listener1_desc,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 80,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port_base,
             const.LOADBALANCER_ID: lb_id,
         }
         if self.mem_listener_client.is_version_supported(
@@ -406,8 +454,8 @@
             const.NAME: listener2_name,
             const.DESCRIPTION: listener2_desc,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 81,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port_base + 1,
             const.LOADBALANCER_ID: lb_id,
         }
         if self.mem_listener_client.is_version_supported(
@@ -444,8 +492,8 @@
             const.NAME: listener3_name,
             const.DESCRIPTION: listener3_desc,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 82,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port_base + 2,
             const.LOADBALANCER_ID: lb_id,
         }
         if self.mem_listener_client.is_version_supported(
@@ -492,8 +540,7 @@
                 query_params='loadbalancer_id={lb_id}'.format(lb_id=lb_id))
             self.assertEqual(0, len(primary))
 
-        # Test that a user without the lb member role cannot list load
-        # balancers
+        # Test that a user without the lb member role cannot list listeners
         if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
             self.assertRaises(
                 exceptions.Forbidden,
@@ -622,7 +669,27 @@
                                      for listener in list_of_listeners]))
 
     @decorators.idempotent_id('6e299eae-6907-4dfc-89c2-e57709d25d3d')
-    def test_listener_show(self):
+    def test_http_listener_show(self):
+        self._test_listener_show(const.HTTP, 8050)
+
+    @decorators.idempotent_id('aa838646-435f-4a20-8442-519a7a138e7e')
+    def test_https_listener_show(self):
+        self._test_listener_show(const.HTTPS, 8051)
+
+    @decorators.idempotent_id('1fcbbee2-b697-4890-b6bf-d308ac1c94cd')
+    def test_tcp_listener_show(self):
+        self._test_listener_show(const.TCP, 8052)
+
+    @decorators.idempotent_id('1dea3a6b-c95b-4e91-b591-1aa9cbcd0d1d')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_show(self):
+        self._test_listener_show(const.UDP, 8053)
+
+    def _test_listener_show(self, protocol, protocol_port):
         """Tests listener show API.
 
         * Create a fully populated listener.
@@ -637,19 +704,21 @@
             const.NAME: listener_name,
             const.DESCRIPTION: listener_description,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 81,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "true",
-                const.X_FORWARDED_PORT: "true"
-            },
             # TODO(rm_work): need to finish the rest of this stuff
             # const.DEFAULT_POOL_ID: '',
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+        if protocol == const.HTTP:
+            listener_kwargs[const.INSERT_HEADERS] = {
+                const.X_FORWARDED_FOR: "true",
+                const.X_FORWARDED_PORT: "true",
+                const.X_FORWARDED_PROTO: "true",
+            }
 
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
@@ -715,11 +784,14 @@
         for item in equal_items:
             self.assertEqual(listener_kwargs[item], listener[item])
 
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
+        if protocol == const.HTTP:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PROTO]))
 
         parser.parse(listener[const.CREATED_AT])
         parser.parse(listener[const.UPDATED_AT])
@@ -763,7 +835,27 @@
                 listener[const.ID])
 
     @decorators.idempotent_id('aaae0298-5778-4c7e-a27a-01549a71b319')
-    def test_listener_update(self):
+    def test_http_listener_update(self):
+        self._test_listener_update(const.HTTP, 8060)
+
+    @decorators.idempotent_id('9679b061-2b2c-469f-abd9-26ed140ef001')
+    def test_https_listener_update(self):
+        self._test_listener_update(const.HTTPS, 8061)
+
+    @decorators.idempotent_id('8d933121-db03-4ccc-8b77-4e879064a9ba')
+    def test_tcp_listener_update(self):
+        self._test_listener_update(const.TCP, 8062)
+
+    @decorators.idempotent_id('fd02dbfd-39ce-41c2-b181-54fc7ad91707')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_update(self):
+        self._test_listener_update(const.UDP, 8063)
+
+    def _test_listener_update(self, protocol, protocol_port):
         """Tests listener update and show APIs.
 
         * Create a fully populated listener.
@@ -781,19 +873,22 @@
             const.NAME: listener_name,
             const.DESCRIPTION: listener_description,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 82,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "true",
-                const.X_FORWARDED_PORT: "true"
-            },
             # TODO(rm_work): need to finish the rest of this stuff
             # const.DEFAULT_POOL_ID: '',
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+        if protocol == const.HTTP:
+            listener_kwargs[const.INSERT_HEADERS] = {
+                const.X_FORWARDED_FOR: "true",
+                const.X_FORWARDED_PORT: "true",
+                const.X_FORWARDED_PROTO: "true"
+            }
+
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             listener_kwargs.update({
@@ -840,14 +935,17 @@
         UUID(listener[const.ID])
         # Operating status will be OFFLINE while admin_state_up = False
         self.assertEqual(const.OFFLINE, listener[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, listener[const.PROTOCOL])
-        self.assertEqual(82, listener[const.PROTOCOL_PORT])
+        self.assertEqual(protocol, listener[const.PROTOCOL])
+        self.assertEqual(protocol_port, listener[const.PROTOCOL_PORT])
         self.assertEqual(200, listener[const.CONNECTION_LIMIT])
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
+        if protocol == const.HTTP:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PROTO]))
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             self.assertEqual(1000, listener[const.TIMEOUT_CLIENT_DATA])
@@ -902,15 +1000,17 @@
             const.DESCRIPTION: new_description,
             const.ADMIN_STATE_UP: True,
             const.CONNECTION_LIMIT: 400,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "false",
-                const.X_FORWARDED_PORT: "false"
-            },
             # TODO(rm_work): need to finish the rest of this stuff
             # const.DEFAULT_POOL_ID: '',
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+        if protocol == const.HTTP:
+            listener_update_kwargs[const.INSERT_HEADERS] = {
+                const.X_FORWARDED_FOR: "false",
+                const.X_FORWARDED_PORT: "false",
+                const.X_FORWARDED_PROTO: "false"
+            }
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             listener_update_kwargs.update({
@@ -973,11 +1073,14 @@
         else:
             self.assertEqual(const.ONLINE, listener[const.OPERATING_STATUS])
         self.assertEqual(400, listener[const.CONNECTION_LIMIT])
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertFalse(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertFalse(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
+        if protocol == const.HTTP:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertFalse(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertFalse(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+            self.assertFalse(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PROTO]))
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             self.assertEqual(2000, listener[const.TIMEOUT_CLIENT_DATA])
@@ -998,7 +1101,27 @@
             self.assertEqual(expected_cidrs, listener[const.ALLOWED_CIDRS])
 
     @decorators.idempotent_id('16f11c82-f069-4592-8954-81b35a98e3b7')
-    def test_listener_delete(self):
+    def test_http_listener_delete(self):
+        self._test_listener_delete(const.HTTP, 8070)
+
+    @decorators.idempotent_id('769526a0-df71-47cd-996e-46484de32223')
+    def test_https_listener_delete(self):
+        self._test_listener_delete(const.HTTPS, 8071)
+
+    @decorators.idempotent_id('f5ca019d-2b33-48f9-9c2d-2ec169b423ca')
+    def test_tcp_listener_delete(self):
+        self._test_listener_delete(const.TCP, 8072)
+
+    @decorators.idempotent_id('86bd9717-e3e9-41e3-86c4-888c64455926')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_delete(self):
+        self._test_listener_delete(const.UDP, 8073)
+
+    def _test_listener_delete(self, protocol, protocol_port):
         """Tests listener create and delete APIs.
 
         * Creates a listener.
@@ -1010,8 +1133,8 @@
 
         listener_kwargs = {
             const.NAME: listener_name,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: 83,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
         }
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
@@ -1059,7 +1182,27 @@
             CONF.load_balancer.check_timeout)
 
     @decorators.idempotent_id('6f14a6c1-945e-43bc-8215-410c8a5edb25')
-    def test_listener_show_stats(self):
+    def test_http_listener_show_stats(self):
+        self._test_listener_show_stats(const.HTTP, 8080)
+
+    @decorators.idempotent_id('f8a43c27-f0a0-496d-a287-1958f337ac04')
+    def test_https_listener_show_stats(self):
+        self._test_listener_show_stats(const.HTTPS, 8081)
+
+    @decorators.idempotent_id('8a999856-f448-498c-b891-21af449b5208')
+    def test_tcp_listener_show_stats(self):
+        self._test_listener_show_stats(const.TCP, 8082)
+
+    @decorators.idempotent_id('a4c1f199-923b-41e4-a134-c91e590e20c4')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_listener_show_stats(self):
+        self._test_listener_show_stats(const.UDP, 8083)
+
+    def _test_listener_show_stats(self, protocol, protocol_port):
         """Tests listener show statistics API.
 
         * Create a listener.
@@ -1075,8 +1218,8 @@
             const.NAME: listener_name,
             const.DESCRIPTION: listener_description,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: const.HTTP,
-            const.PROTOCOL_PORT: 84,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
         }
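
These same-port permutations all exercise one rule: HTTP, HTTPS, and TCP listeners bind a TCP socket, UDP binds a UDP socket, and only listeners on different transports may share a port. A toy model of that rule (not Octavia code; the helper names are made up):

```python
def transport(protocol):
    # HTTP, HTTPS and TCP listeners all ride on TCP; only UDP differs.
    return 'UDP' if protocol == 'UDP' else 'TCP'


def conflicts(existing_protocol, new_protocol):
    """Same-port listeners conflict iff they share a transport."""
    return transport(existing_protocol) == transport(new_protocol)


assert not conflicts('HTTP', 'UDP')  # listener2: other transport, allowed
assert conflicts('HTTP', 'HTTP')     # listener3: duplicate, Conflict
assert conflicts('HTTP', 'HTTPS')    # listener4: both TCP-based, Conflict
```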
diff --git a/octavia_tempest_plugin/tests/api/v2/test_load_balancer.py b/octavia_tempest_plugin/tests/api/v2/test_load_balancer.py
index 684eddb..0cf3576 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_load_balancer.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_load_balancer.py
@@ -197,7 +197,7 @@
             CONF.load_balancer.lb_build_interval,
             CONF.load_balancer.lb_build_timeout)
 
-    @decorators.idempotent_id('643ef031-c800-45f2-b229-3c8f8b37c829')
+    @decorators.idempotent_id('abd784e3-485f-442a-85da-d91365c6b5dd')
     def test_load_balancer_delete_cascade(self):
         """Tests load balancer create and cascade delete APIs.
 
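
The test_load_balancer.py hunk above replaces an idempotent_id that collided with another test's UUID; every tempest test needs a unique one. Tempest ships a check-uuid tool for exactly this, but an ad-hoc scan is easy to sketch (the regex and CLI below are illustrative, not part of the plugin):

```python
import re
import sys
from collections import Counter

# Matches the UUID argument of @decorators.idempotent_id('...') lines.
ID_RE = re.compile(r"idempotent_id\('([0-9a-f-]{36})'\)")


def duplicate_ids(paths):
    """Return idempotent_id UUIDs that appear more than once."""
    counts = Counter()
    for path in paths:
        with open(path) as f:
            counts.update(ID_RE.findall(f.read()))
    return sorted(uid for uid, n in counts.items() if n > 1)


if __name__ == '__main__':
    for uid in duplicate_ids(sys.argv[1:]):
        print('duplicate idempotent_id:', uid)
```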
diff --git a/octavia_tempest_plugin/tests/api/v2/test_member.py b/octavia_tempest_plugin/tests/api/v2/test_member.py
index 583f93e..8bf7810 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_member.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_member.py
@@ -19,6 +19,7 @@
 from dateutil import parser
 from tempest import config
 from tempest.lib.common.utils import data_utils
+from tempest.lib.common.utils import misc
 from tempest.lib import decorators
 from tempest.lib import exceptions
 
@@ -28,6 +29,16 @@
 
 CONF = config.CONF
 
+# Member port numbers need to be unique on the shared pools, so we
+# generate them from a single shared counter.
+@misc.singleton
+class MemberPort(object):
+
+    current_port = 8000
+
+    def increment(self):
+        self.current_port += 1
+        return self.current_port
+
 
 class MemberAPITest(test_base.LoadBalancerBaseTest):
     """Test the member object API."""
@@ -41,10 +52,10 @@
         lb_kwargs = {const.PROVIDER: CONF.load_balancer.provider,
                      const.NAME: lb_name}
         cls._setup_lb_network_kwargs(lb_kwargs)
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
+
+        cls.current_listener_port = 8000
+        cls.listener_pool_cache = {}
+        cls.member_port = MemberPort()
 
         lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
         cls.lb_id = lb[const.ID]
@@ -58,18 +69,42 @@
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
 
+    @classmethod
+    def _listener_pool_create(cls, listener_protocol, pool_protocol,
+                              algorithm):
+        """Setup resources needed by the tests."""
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            cls.mem_listener_client.is_version_supported(
+                cls.api_version, '2.13')):
+            raise cls.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
+        if (listener_protocol == const.UDP and
+                not cls.mem_listener_client.is_version_supported(
+                    cls.api_version, '2.1')):
+            raise cls.skipException('UDP listener support is only available '
+                                    'in Octavia API version 2.1 or newer')
+
+        # Cache listener/pool combinations we have already created as
+        # they can be reused for member test permutations
+        listener_pool_key = listener_protocol + pool_protocol + algorithm
+        pool_id = cls.listener_pool_cache.get(listener_pool_key, None)
+        if pool_id is not None:
+            return pool_id
+
         listener_name = data_utils.rand_name("lb_member_listener1_member")
         listener_kwargs = {
             const.NAME: listener_name,
-            const.PROTOCOL: cls.protocol,
-            const.PROTOCOL_PORT: '80',
+            const.PROTOCOL: listener_protocol,
+            const.PROTOCOL_PORT: cls.current_listener_port,
             const.LOADBALANCER_ID: cls.lb_id,
+            const.CONNECTION_LIMIT: 200
         }
+        cls.current_listener_port += 1
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
-        cls.listener_id = listener[const.ID]
         cls.addClassResourceCleanup(
-            cls.mem_listener_client.cleanup_listener,
-            cls.listener_id,
+            cls.mem_listener_client.cleanup_listener, listener[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -81,16 +116,28 @@
         pool_name = data_utils.rand_name("lb_member_pool1_member")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: cls.protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
-            const.LISTENER_ID: cls.listener_id,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
+            const.LISTENER_ID: listener[const.ID]
         }
 
-        pool = cls.mem_pool_client.create_pool(**pool_kwargs)
-        cls.pool_id = pool[const.ID]
+        # This is a special case: the reference driver does not support
+        # SOURCE_IP_PORT. Since it runs with not_implemented_is_error, we
+        # must handle this case specially.
+        try:
+            pool = cls.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise cls.skipException(message)
+
         cls.addClassResourceCleanup(
-            cls.mem_pool_client.cleanup_pool,
-            cls.pool_id,
+            cls.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -98,28 +145,699 @@
                                 const.ACTIVE,
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
+        cls.listener_pool_cache[listener_pool_key] = pool[const.ID]
+        return pool[const.ID]
 
-    def _create_member_and_get_monitor_status(self, **member_kwargs):
-        monitor = CONF.loadbalancer_feature_enabled.health_monitor_enabled
-        if not monitor:
-            del member_kwargs[const.MONITOR_ADDRESS]
-            del member_kwargs[const.MONITOR_PORT]
-        member = self.mem_member_client.create_member(**member_kwargs)
-        return member, monitor
+    @decorators.idempotent_id('0684575a-0970-4fa8-8006-10c2b39c5f2b')
+    def test_ipv4_HTTP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
 
-    # Note: This test also covers basic member show API
+    @decorators.idempotent_id('10641ec2-981e-4092-a0d0-89a434506eef')
+    def test_ipv4_HTTP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('dce70b40-502b-4b1c-8592-180817324ea0')
+    def test_ipv4_HTTPS_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('46555ea6-12a8-4961-b105-bffdead7abcd')
+    def test_ipv4_HTTPS_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('c1a5e297-f38e-4fc8-92a4-4177a37c4794')
+    def test_ipv4_PROXY_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('dfe24159-96a4-4496-888e-e74acd9d390d')
+    def test_ipv4_PROXY_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('2716d05d-6b04-405e-bda9-e79c778eb6dd')
+    def test_ipv4_TCP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('14ee6e7d-c434-4e2e-947b-1a37d5ffa3bd')
+    def test_ipv4_TCP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('fb9d0708-e320-45d7-be30-f6e7ea45c644')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('5d36d4a4-3b9c-4d54-af61-5f80080bb040')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(4, pool_id)
+
     @decorators.idempotent_id('0623aa1f-753d-44e7-afa1-017d274eace7')
-    def test_member_ipv4_create(self):
-        self._test_member_create(4)
+    def test_ipv4_HTTP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
 
-    # Note: This test also covers basic member show API
+    @decorators.idempotent_id('96b709fa-dca3-4780-8de7-fb168d455d76')
+    def test_ipv4_HTTP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('5354ac6c-653d-43ce-8096-1f9de961de73')
+    def test_ipv4_HTTPS_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('e5c8503a-4bc5-43ad-b0da-3e5c1ef719f7')
+    def test_ipv4_HTTPS_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('b2c8632b-f833-4844-9af3-ffee655be6bf')
+    def test_ipv4_PROXY_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('e565de98-88e2-4529-9730-a66073e31480')
+    def test_ipv4_PROXY_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('9306b599-c8e2-4ce9-b789-9e32d42406c4')
+    def test_ipv4_TCP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('9bbfec96-a7e5-414d-96d1-710e468b8700')
+    def test_ipv4_TCP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('ccced84a-994d-4d30-a07a-30fa83e4dde2')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('bc0802dd-633f-42d4-8c6a-b4c70af29870')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('0d9a8b32-0c13-49ea-8dd3-a124ec4ac6f9')
+    def test_ipv4_HTTP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('57c52d0c-0a62-4988-a02e-2f9f8b440d08')
+    def test_ipv4_HTTP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('df427d31-9843-4840-9137-6b88c633d329')
+    def test_ipv4_HTTPS_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('e64c28f6-09f5-4fd8-a59e-bcf90975581a')
+    def test_ipv4_HTTPS_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('5c32e2fd-9148-466c-b788-e11d7a48483b')
+    def test_ipv4_PROXY_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('76435fb2-dcb3-4be2-ada9-2dbc375c100b')
+    def test_ipv4_PROXY_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('62db0223-1e44-4d6c-8499-9f72c86d30e3')
+    def test_ipv4_TCP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('2da54523-cefc-4a44-ab07-c33ffe891bf0')
+    def test_ipv4_TCP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('9a298318-89a5-416f-b027-af5eda94f813')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('b0455c5e-3702-41d7-8069-6ce55563767c')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('a2dbf216-a974-45e1-822d-859f76c89ed6')
+    def test_ipv4_HTTP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('926eeed9-ecf4-4d22-9417-ef7a7e0a7788')
+    def test_ipv4_HTTP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('55da07bc-bf2c-4924-aba3-a03456843e14')
+    def test_ipv4_HTTPS_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('6773a8bd-1c51-4040-84ba-1aa2b6c4280d')
+    def test_ipv4_HTTPS_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('ab462d7c-069d-4b55-b6a7-dd199bde65b3')
+    def test_ipv4_PROXY_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('b1f6f779-2535-4e47-add2-24561545ba59')
+    def test_ipv4_PROXY_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('73673efc-5b70-4394-b831-1d59fe283e7d')
+    def test_ipv4_TCP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('0a08da1e-84f5-4068-84ec-1312b6b8bee3')
+    def test_ipv4_TCP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('caf68a97-1911-466c-b392-50b946e2395c')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('bb750dc5-73a8-4722-bf3b-cdafaefe7914')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_ipv4_UDP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(4, pool_id)
+
+    @decorators.idempotent_id('cd894c4f-2256-405f-aa6e-2f77973c749a')
+    def test_ipv6_HTTP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('3bbb8554-f757-4673-92e3-8593eef83f19')
+    def test_ipv6_HTTP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('6056724b-d046-497a-ae31-c02af67d4fbb')
+    def test_ipv6_HTTPS_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('354e44d3-db08-4ba9-8e3e-8c3210542a86')
+    def test_ipv6_HTTPS_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('0f8b924e-dd0b-44f9-92b6-8f3dfb0a720c')
+    def test_ipv6_PROXY_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d1efbfab-b674-4b78-8014-7ecf7ab464ac')
+    def test_ipv6_PROXY_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('699f1c0d-65ae-40d7-9abd-2cef0a1560b9')
+    def test_ipv6_TCP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('595255f9-f595-43e7-a398-80dd76719aa8')
+    def test_ipv6_TCP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('0c2c2d5f-9602-4602-82e7-94a1393c295d')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_LC_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d41f5b46-ba06-42bf-a320-0fda106a7543')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_LC_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_create(6, pool_id)
+
     @decorators.idempotent_id('141944cc-5e2c-4e83-88f8-f61a6797c9b7')
-    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
-                          'IPv6 testing is disabled')
-    def test_member_ipv6_create(self):
-        self._test_member_create(6)
+    def test_ipv6_HTTP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
 
-    def _test_member_create(self, ip_version):
+    @decorators.idempotent_id('883db951-adb1-4e05-8369-99f38fde6b3c')
+    def test_ipv6_HTTP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('364ba4b9-825a-4f92-9bf2-8d76bcba0288')
+    def test_ipv6_HTTPS_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('70ba1deb-d644-437f-af80-4299461b20af')
+    def test_ipv6_HTTPS_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('06facdb2-5b7e-4e8b-810d-8f829c619a6d')
+    def test_ipv6_PROXY_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('4ec5a74b-06bd-4005-8fcc-25d1bced4807')
+    def test_ipv6_PROXY_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('22600457-ffe5-44a0-90b0-da4f48051023')
+    def test_ipv6_TCP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('57e6f861-5a55-43a5-9cae-a966bd2a48eb')
+    def test_ipv6_TCP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('4cddcf8a-566e-4a5a-bf81-99026b17f676')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_RR_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('26547c9a-6bbc-429a-9436-e94f2930b9e1')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_RR_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('6759674b-dca0-4a48-b166-3f87dc1cc727')
+    def test_ipv6_HTTP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('683d9ef4-6af3-48e2-aba4-9f404d493467')
+    def test_ipv6_HTTP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('3d308996-3522-4c91-9bfd-48fedc2ed2f2')
+    def test_ipv6_HTTPS_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('dc26ce07-d580-4a55-b7cd-1b4f09c13572')
+    def test_ipv6_HTTPS_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('56944f91-bf4b-4e9a-9b05-6207e8184c75')
+    def test_ipv6_PROXY_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('e8f4eb38-8e8b-485a-b70a-b1679ad58b66')
+    def test_ipv6_PROXY_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('fc0e77b1-e115-4ec7-80e3-c00d79932549')
+    def test_ipv6_TCP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('66ddafd2-ace3-43ea-b78b-78b6b0a4d9eb')
+    def test_ipv6_TCP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('ccfc1283-9e8e-4aa5-a5d3-1d18d57bec65')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_SI_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('b62c8562-fdbb-4989-a5ae-d9e1c1b76cd5')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_SI_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('d816d324-2434-4812-9b3e-a3f0d4949008')
+    def test_ipv6_HTTP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d4cfe315-b6d6-4940-8ff6-5f5252028eec')
+    def test_ipv6_HTTP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('05684ab0-dff3-41aa-8b42-7f95fd6aa4ab')
+    def test_ipv6_HTTPS_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('ed42872c-1ffc-4210-9f69-5f7eb8ec732f')
+    def test_ipv6_HTTPS_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('ab87132f-5a0e-40a1-9498-9883780d31a9')
+    def test_ipv6_PROXY_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('227d7f40-a224-4e67-8844-2d28abc5171e')
+    def test_ipv6_PROXY_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('b8394de8-a898-4cab-aa0c-f3168d702ee0')
+    def test_ipv6_TCP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('7a86e00b-90bf-4fd3-8636-ae7264929106')
+    def test_ipv6_TCP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id)
+
+    @decorators.idempotent_id('5abdfbcd-d1cd-4e6a-b98f-79afea442ad8')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_SIP_alt_monitor_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('63f558b4-d2f8-4e4c-828b-3651e50844b7')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   not CONF.load_balancer.test_with_ipv6))
+    def test_ipv6_UDP_SIP_member_create(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_create(6, pool_id)
+
+    def _test_member_create(self, ip_version, pool_id,
+                            alternate_monitor=False):
         """Tests member create and basic show APIs.
 
         * Tests that users without the loadbalancer member role cannot
@@ -129,6 +847,12 @@
         * Show member details.
         * Validate the show reflects the requested values.
         """
+        if ip_version == 6 and not CONF.load_balancer.test_with_ipv6:
+            raise testtools.TestCase.skipException(
+                'Skipping this test because test_with_ipv6 is not "True" in '
+                'the [load_balancer] section of tempest.conf; testing with '
+                'IPv6 is disabled.')
+
         if ip_version == 4:
             member_address = '192.0.2.1'
             member_monitor_address = '192.0.2.2'
@@ -140,13 +864,16 @@
         member_kwargs = {
             const.NAME: member_name,
             const.ADMIN_STATE_UP: True,
-            const.POOL_ID: self.pool_id,
+            const.POOL_ID: pool_id,
             const.ADDRESS: member_address,
-            const.PROTOCOL_PORT: 80,
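+            # member_port is assumed to be a class-wide counter; taking the
+            # next value gives every member created by these tests a unique
+            # protocol port within the shared pools.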
+            const.PROTOCOL_PORT: self.member_port.increment(),
             const.WEIGHT: 50,
-            const.MONITOR_ADDRESS: member_monitor_address,
-            const.MONITOR_PORT: 8080,
         }
+
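+        # The monitor address/port overrides are optional; only include them
+        # when the test exercises the alternate-monitor configuration.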
+        if alternate_monitor:
+            member_kwargs[const.MONITOR_ADDRESS] = member_monitor_address
+            member_kwargs[const.MONITOR_PORT] = 8080
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             member_kwargs.update({
@@ -172,12 +899,11 @@
                 self.os_primary.member_client.create_member,
                 **member_kwargs)
 
-        member, monitor = self._create_member_and_get_monitor_status(
-            **member_kwargs)
+        member = self.mem_member_client.create_member(**member_kwargs)
 
         self.addClassResourceCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_id,
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(
@@ -191,7 +917,7 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_id)
+            pool_id=pool_id)
 
         parser.parse(member[const.CREATED_AT])
         parser.parse(member[const.UPDATED_AT])
@@ -209,8 +935,9 @@
             self.assertCountEqual(member_kwargs[const.TAGS],
                                   member[const.TAGS])
 
-        if monitor:
+        if alternate_monitor:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
+
         if const.SUBNET_ID in member_kwargs:
             equal_items.append(const.SUBNET_ID)
         else:
@@ -219,8 +946,107 @@
         for item in equal_items:
             self.assertEqual(member_kwargs[item], member[item])
 
+    @decorators.idempotent_id('fcc5c6cd-d1c2-4a49-8d26-2268608e59a6')
+    def test_HTTP_LC_member_list(self):
+        self._test_member_list(const.HTTP,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('33450ca2-db09-451a-bd46-6f260bf520f5')
+    def test_HTTPS_LC_member_list(self):
+        self._test_member_list(const.HTTPS,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('c17241d4-5cff-41e2-9742-047647d61546')
+    def test_PROXY_LC_member_list(self):
+        self._test_member_list(const.PROXY,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('d1300e9a-64ba-4d02-baf6-2523439b80d7')
+    def test_TCP_LC_member_list(self):
+        self._test_member_list(const.TCP,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('dffc1dfb-7506-4f81-b1e5-5835b9690079')
+    def test_UDP_LC_member_list(self):
+        self._test_member_list(const.UDP,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('9ce7ad78-915b-42ce-b0d8-44d88a929f3d')
-    def test_member_list(self):
+    def test_HTTP_RR_member_list(self):
+        self._test_member_list(const.HTTP,
+                               const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('8f6362a1-d98b-4696-b88d-41e1eb4a9f70')
+    def test_HTTPS_RR_member_list(self):
+        self._test_member_list(const.HTTPS,
+                               const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('ce4109f8-3bad-4965-95ec-7170519e4a3f')
+    def test_PROXY_RR_member_list(self):
+        self._test_member_list(const.PROXY,
+                               const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('5c35df5d-8951-4506-905b-502f623cc9e4')
+    def test_TCP_RR_member_list(self):
+        self._test_member_list(const.TCP,
+                               const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('c7de7d31-2910-4864-84df-61a883e916fb')
+    def test_UDP_RR_member_list(self):
+        self._test_member_list(const.UDP,
+                               const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('0d142f26-c9e4-45bf-8cd7-1f5659301047')
+    def test_HTTP_SI_member_list(self):
+        self._test_member_list(const.HTTP,
+                               const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('a2be8c21-c7b1-4c1d-ab39-43042bf75a19')
+    def test_HTTPS_SI_member_list(self):
+        self._test_member_list(const.HTTPS,
+                               const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('1ab3978b-8a37-45d3-8e2c-aab4c2187d43')
+    def test_PROXY_SI_member_list(self):
+        self._test_member_list(const.PROXY,
+                               const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('2da08931-bc4c-4339-b16a-43d40ca7734d')
+    def test_TCP_SI_member_list(self):
+        self._test_member_list(const.TCP,
+                               const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('ea12a6bc-6267-4790-b2b3-cbd6a146533b')
+    def test_UDP_SI_member_list(self):
+        self._test_member_list(const.UDP,
+                               const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('2f9d0974-2e55-49c1-b83a-8bdd6dfdb46c')
+    def test_HTTP_SIP_member_list(self):
+        self._test_member_list(const.HTTP,
+                               const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('d31c5b8b-7ec1-4e78-a821-30e9a1e05139')
+    def test_HTTPS_SIP_member_list(self):
+        self._test_member_list(const.HTTPS,
+                               const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('15f8690b-f345-413c-9b4e-af39d546fbec')
+    def test_PROXY_SIP_member_list(self):
+        self._test_member_list(const.PROXY,
+                               const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('db5769ce-f4b0-4a0f-92a7-4eeed66b6730')
+    def test_TCP_SIP_member_list(self):
+        self._test_member_list(const.TCP,
+                               const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('638811fa-26ce-44f3-8ac7-29cf1ef41838')
+    def test_UDP_SIP_member_list(self):
+        self._test_member_list(const.UDP,
+                               const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    def _test_member_list(self, pool_protocol, algorithm):
         """Tests member list API and field filtering.
 
         * Create a clean pool.
@@ -234,11 +1060,37 @@
         * List the members filtering to one of the three.
         * List the members filtered, one field, and sorted.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+        if (pool_protocol == const.UDP and
+                not self.mem_listener_client.is_version_supported(
+                    self.api_version, '2.1')):
+            raise self.skipException('UDP support is only available '
+                                     'in Octavia API version 2.1 or newer')
+
         pool_name = data_utils.rand_name("lb_member_pool2_member-list")
-        pool = self.mem_pool_client.create_pool(
-            name=pool_name, loadbalancer_id=self.lb_id,
-            protocol=self.protocol,
-            lb_algorithm=self.lb_algorithm)
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(
+                name=pool_name, loadbalancer_id=self.lb_id,
+                protocol=pool_protocol,
+                lb_algorithm=algorithm)
+        except exceptions.NotImplemented as e:
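+            # Only SOURCE_IP_PORT is expected to be unimplemented here; any
+            # other NotImplemented is a genuine failure, so re-raise it.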
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         pool_id = pool[const.ID]
         self.addCleanup(
             self.mem_pool_client.cleanup_pool, pool_id,
@@ -488,8 +1340,351 @@
             self.assertTrue(not any(["" in member[const.TAGS]
                                      for member in list_of_members]))
 
+    @decorators.idempotent_id('2674b363-7922-494a-b121-cf415dbbb716')
+    def test_HTTP_LC_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('a99da0e8-0595-49a5-a788-efc37fad2dc2')
+    def test_HTTP_LC_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('c3db94b3-a38c-4a0a-8c53-85888c2e1876')
+    def test_HTTPS_LC_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('5844580e-6d01-42dc-b951-d995c9612167')
+    def test_HTTPS_LC_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('ebc52abf-9133-4922-902c-505f890bb44e')
+    def test_PROXY_LC_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('43f90043-65d4-483a-99ab-564f25acc0d7')
+    def test_PROXY_LC_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('ecec1c11-2c2c-408c-9b4e-01620266dab6')
+    def test_TCP_LC_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('baa9b4ec-1ac5-431f-bae0-f2ef68d1c81a')
+    def test_TCP_LC_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('3a291344-0a88-46fc-9eca-c2c6b9048076')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_LC_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('e53b2d6a-ad3f-46be-b899-56324874ad24')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_LC_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_show(pool_id)
+
     @decorators.idempotent_id('7674ae04-7e92-44ef-9adf-40718d7ec705')
-    def test_member_show(self):
+    def test_HTTP_RR_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('2c4a29f4-be25-416c-9546-9585298cfe4c')
+    def test_HTTP_RR_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('a06a137d-f6d1-44a6-978b-22fe8e23752c')
+    def test_HTTPS_RR_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('bfe7dfea-878e-4e7c-afd8-9860d7282930')
+    def test_HTTPS_RR_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('007c4f0c-8192-4806-9c25-c2f27ea4ba57')
+    def test_PROXY_RR_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('0d6d2875-d1b3-4508-8e17-1c656a5f31ec')
+    def test_PROXY_RR_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('9c968920-1fcc-4a71-8dc9-fdf2ff59af7c')
+    def test_TCP_RR_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('1acd8a34-dd13-411e-bdf3-414b3fcc569d')
+    def test_TCP_RR_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('2d154e0c-4955-4b00-92d5-e9df7b2fbf63')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_RR_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('06de8b15-caf3-4a75-b278-cdfe6208c8db')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_RR_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('6c88a35e-b7c1-4b14-bdae-1a710890555a')
+    def test_HTTP_SI_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('266b60e2-7c3a-4edb-950b-66d57aa64b80')
+    def test_HTTP_SI_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('c2c8ba90-6ade-4fd3-bf12-e15627983917')
+    def test_HTTPS_SI_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('4af933ae-5c6d-4849-af85-e06f7d5a661c')
+    def test_HTTPS_SI_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('383148a5-a9ec-413a-a44c-85c1bbb39729')
+    def test_PROXY_SI_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('518c4d3f-2b5a-4f8a-9c5e-fad15127502e')
+    def test_PROXY_SI_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('9b95b5f2-8823-4019-be86-311a1bde5b20')
+    def test_TCP_SI_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d8576e58-e8ff-491d-beee-b7c439d2c41c')
+    def test_TCP_SI_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('896acc77-3b73-4565-ad87-9467218b143b')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SI_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('ffc64ff1-ec8c-4201-a295-a179adc0c7e0')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SI_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('7c4fcb3e-a994-4d39-97cc-929c022c001e')
+    def test_HTTP_SIP_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('704d04c3-e639-4dee-b55d-09ebf55f8a0d')
+    def test_HTTP_SIP_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('69de0c60-3e4f-40cf-9bf7-d2b1e6c83715')
+    def test_HTTPS_SIP_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('2d970b1c-c157-4974-b605-b8e08d97e874')
+    def test_HTTPS_SIP_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('28e1e3e7-454b-409d-84c3-1826f82ca9dd')
+    def test_PROXY_SIP_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('189ec327-b1c5-47a8-a843-10963cba0a9c')
+    def test_PROXY_SIP_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('6953a9a4-5fac-4470-bfda-4fafbd67288b')
+    def test_TCP_SIP_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('6d8d546f-8c41-49b9-bd9d-8f8ea3975816')
+    def test_TCP_SIP_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id)
+
+    @decorators.idempotent_id('9c0d4668-5a0e-41b3-b3b4-3d0372fe28af')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SIP_alt_monitor_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('a03d02d0-830c-4aad-a10b-96c47974483c')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SIP_member_show(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_show(pool_id)
+
+    def _test_member_show(self, pool_id, alternate_monitor=False):
         """Tests member show API.
 
         * Create a fully populated member.
@@ -501,13 +1696,15 @@
         member_kwargs = {
             const.NAME: member_name,
             const.ADMIN_STATE_UP: True,
-            const.POOL_ID: self.pool_id,
+            const.POOL_ID: pool_id,
             const.ADDRESS: '192.0.2.1',
-            const.PROTOCOL_PORT: 81,
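+            # Unique port from the shared member_port counter, as in
+            # _test_member_create.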
+            const.PROTOCOL_PORT: self.member_port.increment(),
             const.WEIGHT: 50,
-            const.MONITOR_ADDRESS: '192.0.2.2',
-            const.MONITOR_PORT: 8080,
         }
+        if alternate_monitor:
+            member_kwargs[const.MONITOR_ADDRESS] = '192.0.2.2'
+            member_kwargs[const.MONITOR_PORT] = 8080
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             member_kwargs.update({
@@ -517,12 +1714,11 @@
             member_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
 
-        member, monitor = self._create_member_and_get_monitor_status(
-            **member_kwargs)
+        member = self.mem_member_client.create_member(**member_kwargs)
 
         self.addClassResourceCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_id,
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(
@@ -536,7 +1732,7 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_id)
+            pool_id=pool_id)
 
         parser.parse(member[const.CREATED_AT])
         parser.parse(member[const.UPDATED_AT])
@@ -550,8 +1746,9 @@
                 self.api_version, '2.1'):
             equal_items.append(const.BACKUP)
 
-        if monitor:
+        if alternate_monitor:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
+
         if const.SUBNET_ID in member_kwargs:
             equal_items.append(const.SUBNET_ID)
         else:
@@ -564,13 +1761,13 @@
         if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
             member_client = self.os_roles_lb_admin.member_client
             member_adm = member_client.show_member(
-                member[const.ID], pool_id=self.pool_id)
+                member[const.ID], pool_id=pool_id)
             self.assertEqual(member_name, member_adm[const.NAME])
 
         # Test that a user with cloud admin role can see the member
         if not CONF.load_balancer.RBAC_test_type == const.NONE:
             adm = self.os_admin.member_client.show_member(
-                member[const.ID], pool_id=self.pool_id)
+                member[const.ID], pool_id=pool_id)
             self.assertEqual(member_name, adm[const.NAME])
 
         # Test that a different user, with load balancer member role, cannot
@@ -579,7 +1776,7 @@
             member2_client = self.os_roles_lb_member2.member_client
             self.assertRaises(exceptions.Forbidden,
                               member2_client.show_member,
-                              member[const.ID], pool_id=self.pool_id)
+                              member[const.ID], pool_id=pool_id)
 
         # Test that a user, without the load balancer member role, cannot
         # show members
@@ -587,10 +1784,353 @@
             self.assertRaises(
                 exceptions.Forbidden,
                 self.os_primary.member_client.show_member,
-                member[const.ID], pool_id=self.pool_id)
+                member[const.ID], pool_id=pool_id)
+
+    @decorators.idempotent_id('65680d48-1d49-4959-a7d1-677797e54f6b')
+    def test_HTTP_LC_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('52124440-c95c-48fb-af26-70377bcba7d6')
+    def test_HTTP_LC_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('caf95728-5e9c-4295-bd4a-a15263ba5714')
+    def test_HTTPS_LC_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d1d98798-20cb-4290-818c-e814911d25e5')
+    def test_HTTPS_LC_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('b22288fa-4e25-4779-bd78-6b4802926457')
+    def test_PROXY_LC_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('5ee3df40-381a-4497-9e31-df82d8c2e514')
+    def test_PROXY_LC_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('b693b5ba-d8e7-4b89-ad6c-41b56cf258f7')
+    def test_TCP_LC_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('0df9463f-de6b-43c1-934f-6523873f3530')
+    def test_TCP_LC_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('60baa2d7-927a-4b58-80b9-a2e5196985ee')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_LC_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('b036a40f-d220-4be6-abc9-8ca8e01b96c3')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_LC_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_update(pool_id)
 
     @decorators.idempotent_id('c07572b8-e853-48f3-a8ea-37fc293a4724')
-    def test_member_update(self):
+    def test_HTTP_RR_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('f83993ce-b053-42ff-9022-612ed67e8db6')
+    def test_HTTP_RR_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('e419d49f-22e4-4331-985e-3a1cc8d0b6b0')
+    def test_HTTPS_RR_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('42ac5187-799a-4714-972c-fea6e1c6a7b2')
+    def test_HTTPS_RR_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('43d30b1d-0f99-4b46-ad37-542e899ceae7')
+    def test_PROXY_RR_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('d5cf47a6-a3bb-4238-a5cf-a74b122edce4')
+    def test_PROXY_RR_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('5a05f8b0-52ca-4ed7-a1a7-c62aee16c960')
+    def test_TCP_RR_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('46e5d4e8-0ecc-40a7-87bd-f9ccdfc9a2d3')
+    def test_TCP_RR_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('7609cd2f-32ac-4488-869a-7e14827df6ef')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_RR_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('171ae461-af7d-4fe1-961d-78376fcc2b3f')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_RR_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('af40e333-caed-4808-a46c-05c977f3cebc')
+    def test_HTTP_SI_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('f63a9deb-4a45-42c4-9aeb-f7c304ecbc16')
+    def test_HTTP_SI_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('27c17512-51b4-49ae-ac92-3e141599cdda')
+    def test_HTTPS_SI_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('cc67064f-becc-4e31-b9e5-b3ea7e78a187')
+    def test_HTTPS_SI_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('2061a0cf-49e0-49b8-af4d-f197cf84ef11')
+    def test_PROXY_SI_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('5946b163-8684-402a-b228-c0648a3e0734')
+    def test_PROXY_SI_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('7a6314dd-83c5-41ee-92f6-e18409ac213d')
+    def test_TCP_SI_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('93818084-a9fb-480d-a7e1-04066ee0e393')
+    def test_TCP_SI_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('523cf4e8-c071-4778-bc89-367a0b8469e6')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SI_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('9f655415-13d6-4ceb-9ea6-9a32baf0e093')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SI_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('5215ecc4-fd47-451a-b073-399bad8b522c')
+    def test_HTTP_SIP_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('7c89fb05-d949-4c0f-8c61-7e55e494c76f')
+    def test_HTTP_SIP_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('edff98be-6208-4f1c-9cd3-376b7ac47f80')
+    def test_HTTPS_SIP_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('a1f214fe-6c09-4298-b03e-7069b615dec2')
+    def test_HTTPS_SIP_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('56a21d8e-825a-4780-a073-41061a0d55ca')
+    def test_PROXY_SIP_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('25a24e27-218b-4dcd-99aa-e9ca9f8163e5')
+    def test_PROXY_SIP_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('d6625773-2665-423d-8500-cf9b1b38b53e')
+    def test_TCP_SIP_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('87a3e3aa-580a-41ca-bc15-8cb2995c9125')
+    def test_TCP_SIP_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id)
+
+    @decorators.idempotent_id('796a2972-38e6-41fc-a885-6316195acd70')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SIP_alt_monitor_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id, alternate_monitor=True)
+
+    @decorators.idempotent_id('2f4efa91-e61d-4dd6-8006-ebfdb00c1246')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SIP_member_update(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_update(pool_id)
+
+    def _test_member_update(self, pool_id, alternate_monitor=False):
         """Tests member show API and field filtering.
 
         * Create a fully populated member.
@@ -605,13 +2145,15 @@
         member_kwargs = {
             const.NAME: member_name,
             const.ADMIN_STATE_UP: False,
-            const.POOL_ID: self.pool_id,
+            const.POOL_ID: pool_id,
             const.ADDRESS: '192.0.2.1',
-            const.PROTOCOL_PORT: 82,
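+            # member_port is assumed to be a shared counter (set up earlier
+            # in this patch) so each permutation gets a unique member port.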
+            const.PROTOCOL_PORT: self.member_port.increment(),
             const.WEIGHT: 50,
-            const.MONITOR_ADDRESS: '192.0.2.2',
-            const.MONITOR_PORT: 8080,
         }
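+        # alternate_monitor exercises the optional member-level monitor
+        # address/port fields on top of the default member settings.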
+        if alternate_monitor:
+            member_kwargs[const.MONITOR_ADDRESS] = '192.0.2.2'
+            member_kwargs[const.MONITOR_PORT] = 8080
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             member_kwargs.update({
@@ -629,12 +2171,11 @@
             member_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
 
-        member, monitor = self._create_member_and_get_monitor_status(
-            **member_kwargs)
+        member = self.mem_member_client.create_member(**member_kwargs)
 
         self.addClassResourceCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_id,
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(
@@ -648,9 +2189,9 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_id)
+            pool_id=pool_id)
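+        # admin_state_up is False, so drivers report the member OFFLINE;
+        # with a noop driver nothing is monitored and it stays NO_MONITOR.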
         status = const.OFFLINE
-        if not monitor or CONF.load_balancer.test_with_noop:
+        if CONF.load_balancer.test_with_noop:
             status = const.NO_MONITOR
         member = waiters.wait_for_status(
             self.mem_member_client.show_member,
@@ -658,7 +2199,7 @@
             status,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_id)
+            pool_id=pool_id)
 
         parser.parse(member[const.CREATED_AT])
         parser.parse(member[const.UPDATED_AT])
@@ -676,8 +2217,9 @@
             self.assertCountEqual(member_kwargs[const.TAGS],
                                   member[const.TAGS])
 
-        if monitor:
+        if alternate_monitor:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
+
         if const.SUBNET_ID in member_kwargs:
             equal_items.append(const.SUBNET_ID)
         else:
@@ -686,25 +2228,17 @@
         for item in equal_items:
             self.assertEqual(member_kwargs[item], member[item])
 
-        if CONF.load_balancer.test_with_noop or not monitor:
-            # Operating status with noop or Driver not supporting Monitors
-            # will stay in NO_MONITOR
-            self.assertEqual(const.NO_MONITOR, member[const.OPERATING_STATUS])
-        else:
-            # Operating status will be OFFLINE while admin_state_up = False
-            self.assertEqual(const.OFFLINE, member[const.OPERATING_STATUS])
-
         # Test that a user, without the load balancer member role, cannot
         # use this command
         if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
             self.assertRaises(
                 exceptions.Forbidden,
                 self.os_primary.member_client.update_member,
-                member[const.ID], pool_id=self.pool_id, admin_state_up=True)
+                member[const.ID], pool_id=pool_id, admin_state_up=True)
 
         # Assert we didn't go into PENDING_*
         member_check = self.mem_member_client.show_member(
-            member[const.ID], pool_id=self.pool_id)
+            member[const.ID], pool_id=pool_id)
         self.assertEqual(const.ACTIVE,
                          member_check[const.PROVISIONING_STATUS])
         self.assertEqual(member_kwargs[const.ADMIN_STATE_UP],
@@ -716,12 +2250,12 @@
             member2_client = self.os_roles_lb_member2.member_client
             self.assertRaises(exceptions.Forbidden,
                               member2_client.update_member,
-                              member[const.ID], pool_id=self.pool_id,
+                              member[const.ID], pool_id=pool_id,
                               admin_state_up=True)
 
         # Assert we didn't go into PENDING_*
         member_check = self.mem_member_client.show_member(
-            member[const.ID], pool_id=self.pool_id)
+            member[const.ID], pool_id=pool_id)
         self.assertEqual(const.ACTIVE,
                          member_check[const.PROVISIONING_STATUS])
         self.assertEqual(member_kwargs[const.ADMIN_STATE_UP],
@@ -747,7 +2281,7 @@
                 const.TAGS: new_tags
             })
 
-        if monitor:
+        if alternate_monitor:
             member_update_kwargs[const.MONITOR_ADDRESS] = '192.0.2.3'
             member_update_kwargs[const.MONITOR_PORT] = member[
                 const.MONITOR_PORT] + 1
@@ -764,18 +2298,15 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_id)
-        if not CONF.load_balancer.test_with_noop:
-            member = waiters.wait_for_status(
-                self.mem_member_client.show_member,
-                member[const.ID], const.OPERATING_STATUS,
-                const.NO_MONITOR,
-                CONF.load_balancer.build_interval,
-                CONF.load_balancer.build_timeout,
-                pool_id=self.pool_id)
-
+            pool_id=pool_id)
         # Operating status will be NO_MONITOR regardless of noop
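+        # (these pools carry no health monitor; the member-level monitor
+        # address/port alone do not drive the operating status)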
-        self.assertEqual(const.NO_MONITOR, member[const.OPERATING_STATUS])
+        member = waiters.wait_for_status(
+            self.mem_member_client.show_member,
+            member[const.ID], const.OPERATING_STATUS,
+            const.NO_MONITOR,
+            CONF.load_balancer.build_interval,
+            CONF.load_balancer.build_timeout,
+            pool_id=pool_id)
 
         # Test changed items
         equal_items = [const.NAME, const.ADMIN_STATE_UP, const.WEIGHT]
@@ -789,8 +2320,9 @@
             self.assertCountEqual(member_update_kwargs[const.TAGS],
                                   member[const.TAGS])
 
-        if monitor:
+        if alternate_monitor:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
+
         for item in equal_items:
             self.assertEqual(member_update_kwargs[item], member[item])
 
@@ -804,19 +2336,265 @@
         for item in equal_items:
             self.assertEqual(member_kwargs[item], member[item])
 
+    @decorators.idempotent_id('8104628d-6f30-4037-ae65-c6f6c1b3af42')
+    def test_HTTP_LC_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('c4331afe-c129-44e8-8388-fcbbd28cf783')
+    def test_HTTP_LC_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('5ed41de6-8994-4ba4-8107-29eab89fab1e')
+    def test_HTTPS_LC_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('dd4d0a10-0473-47a2-8ec5-815fbdf0c5ee')
+    def test_HTTPS_LC_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('7ae0bd6f-d04c-4d53-bb7e-fef0680726db')
+    def test_PROXY_LC_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('de033003-1dcb-4896-ad5d-9e68e31addf0')
+    def test_PROXY_LC_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('1d79ab5a-1110-43f1-bfc3-1cb4e2ab5011')
+    def test_TCP_LC_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('69bd512a-d561-43b1-9a4a-ea7134ee8f9e')
+    def test_TCP_LC_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('b9fadfe2-c3f2-48a4-97a4-04c58c40df87')
+    def test_UDP_LC_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('f9a125e0-84c4-4f7e-8a82-fe84ca3175e5')
+    def test_UDP_LC_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_LEAST_CONNECTIONS,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('5f412e52-8ee0-4ee5-8b0e-0e8fc68279a6')
+    def test_HTTP_RR_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN)
+
     @decorators.idempotent_id('83e0a9f2-491f-46a8-b3ce-6969d70a4e9f')
-    def test_member_batch_update(self):
+    def test_HTTP_RR_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('ee622c92-a4b4-41a6-96e3-b3b2429276a2')
+    def test_HTTPS_RR_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('7bed4b1c-f862-45bf-ae30-3b4ad0b48870')
+    def test_HTTPS_RR_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_ROUND_ROBIN,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('82325d1a-ad01-471e-bfb3-b75ca86ae8eb')
+    def test_PROXY_RR_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('098f73c5-e3c1-4dfa-bd7f-c87df90743e6')
+    def test_PROXY_RR_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_ROUND_ROBIN,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('176cd46e-f5b1-47d1-9403-a0246272eea4')
+    def test_TCP_RR_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('e952a399-c817-461e-9e37-fdf7e7b34983')
+    def test_TCP_RR_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('dec6b06d-6a69-48d9-b7a9-67eb287fe95a')
+    def test_UDP_RR_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('fdf2423c-c312-466a-b021-130a52b5be35')
+    def test_UDP_RR_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_ROUND_ROBIN,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('546ac0c3-4025-4c88-8276-1c05e7198e82')
+    def test_HTTP_SI_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('6c1fe175-8e99-4adf-934d-bee79c89fa02')
+    def test_HTTP_SI_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_SOURCE_IP,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('3e6c76e0-3630-45f0-a674-5d79b662812b')
+    def test_HTTPS_SI_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('59303a97-cc97-441e-b1a6-395271ec2287')
+    def test_HTTPS_SI_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_SOURCE_IP,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('963d7e71-d8b7-4257-9b01-f1d7ab57cbc8')
+    def test_PROXY_SI_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('f55d8bff-ac68-4e3b-8f05-a9bb69bb0881')
+    def test_PROXY_SI_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_SOURCE_IP,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('71d20f78-ffe3-49a3-b0c6-38cd5804f255')
+    def test_TCP_SI_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('fee72f7a-928d-477f-b09b-5a866be717a3')
+    def test_TCP_SI_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_SOURCE_IP,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('086b407e-3ace-47a8-94e4-cf563674ceb6')
+    def test_UDP_SI_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('04259015-b6d7-411b-8c19-f21e05994b7c')
+    def test_UDP_SI_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_SOURCE_IP,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('617028e2-89fb-4e7e-ba62-1a8a7af697ca')
+    def test_HTTP_SIP_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('44a2508d-ecea-4f92-ba66-a64d6d7f12da')
+    def test_HTTP_SIP_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('794137e5-28c1-4b0e-bc2f-fc030d03a689')
+    def test_HTTPS_SIP_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('c1c8930d-0436-4075-b47a-f3bd263ab8a8')
+    def test_HTTPS_SIP_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.HTTPS,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('1ab05cf2-265e-4291-b17c-19caa0a1b6ff')
+    def test_PROXY_SIP_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('4de187b6-4948-4394-af6b-8828e96d8f3e')
+    def test_PROXY_SIP_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.PROXY,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('1d171080-c7e5-4ee0-83d4-51bb1655cb21')
+    def test_TCP_SIP_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('52328f7a-bec4-4f23-9293-f5f1283c0af9')
+    def test_TCP_SIP_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.TCP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                       alternate_monitor=True)
+
+    @decorators.idempotent_id('00b3ebda-c28c-471b-bbf8-01de6567b4b5')
+    def test_UDP_SIP_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('e4a357a3-1d07-46f4-a8ff-67a279783b24')
+    def test_UDP_SIP_alt_monitor_member_batch_update(self):
+        self._test_member_batch_update(const.UDP,
+                                       const.LB_ALGORITHM_SOURCE_IP_PORT,
+                                       alternate_monitor=True)
+
+    def _test_member_batch_update(self, pool_protocol, algorithm,
+                                  alternate_monitor=False):
         """Tests member batch update.
 
         * Create two members.
         * Batch update the members so that one is deleted, one is created,
           and one is updated.
         * Validate the member list is correct.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+        if (pool_protocol == const.UDP and
+                not self.mem_listener_client.is_version_supported(
+                    self.api_version, '2.1')):
+            raise self.skipException('UDP support is only available '
+                                     'in Octavia API version 2.1 or newer')
+
         pool_name = data_utils.rand_name("lb_member_pool3_member-batch")
-        pool = self.mem_pool_client.create_pool(
-            name=pool_name, loadbalancer_id=self.lb_id,
-            protocol=self.protocol,
-            lb_algorithm=self.lb_algorithm)
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(
+                name=pool_name, loadbalancer_id=self.lb_id,
+                protocol=pool_protocol,
+                lb_algorithm=algorithm)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         pool_id = pool[const.ID]
         self.addClassResourceCleanup(
             self.mem_pool_client.cleanup_pool, pool_id,
@@ -837,9 +2615,11 @@
             const.ADDRESS: '192.0.2.1',
             const.PROTOCOL_PORT: 80,
             const.WEIGHT: 50,
-            const.MONITOR_ADDRESS: '192.0.2.2',
-            const.MONITOR_PORT: 8080,
         }
+        if alternate_monitor:
+            member1_kwargs[const.MONITOR_ADDRESS] = '192.0.2.2'
+            member1_kwargs[const.MONITOR_PORT] = 8080
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             member1_kwargs.update({
@@ -849,8 +2629,7 @@
         if self.lb_member_vip_subnet:
             member1_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
-        member1, monitor = self._create_member_and_get_monitor_status(
-            **member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
 
         self.addClassResourceCleanup(
             self.mem_member_client.cleanup_member,
@@ -879,9 +2658,10 @@
                 const.BACKUP: True,
             })
 
-        if monitor:
+        if alternate_monitor:
             member2_kwargs[const.MONITOR_ADDRESS] = '192.0.2.4'
             member2_kwargs[const.MONITOR_PORT] = 8081
+
         if self.lb_member_vip_subnet:
             member2_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
@@ -913,9 +2693,10 @@
                 const.BACKUP: True,
             })
 
-        if monitor:
+        if alternate_monitor:
-            member2_kwargs[const.MONITOR_ADDRESS] = '192.0.2.6'
-            member2_kwargs[const.MONITOR_PORT] = 8082
+            member3_kwargs[const.MONITOR_ADDRESS] = '192.0.2.6'
+            member3_kwargs[const.MONITOR_PORT] = 8082
+
         if self.lb_member_vip_subnet:
             member3_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
@@ -931,7 +2712,7 @@
             self.assertRaises(
                 exceptions.Forbidden,
                 self.os_primary.member_client.update_members,
-                pool_id=self.pool_id, members_list=batch_update_list)
+                pool_id=pool_id, members_list=batch_update_list)
 
         # Assert we didn't go into PENDING_*
         member_check = self.mem_member_client.show_member(
@@ -970,8 +2751,179 @@
         self.assertEqual(member2_name_update, members[0][const.NAME])
         self.assertEqual(member3_name, members[1][const.NAME])
 
+    @decorators.idempotent_id('8b6574a3-17e8-4950-b24e-66d0c28960d3')
+    def test_HTTP_LC_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('a122557b-4824-4a4f-87f0-6ba5c9ca1e32')
+    def test_HTTPS_LC_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('069b8558-138f-4d6c-a3ec-9e803d5e2a14')
+    def test_PROXY_LC_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('9b2a08cf-c9ae-4f8a-a15c-2acab09a7613')
+    def test_TCP_LC_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('5824438d-cda2-4cea-a7d0-e7f5e5a11cac')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_LC_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_delete(pool_id)
+
     @decorators.idempotent_id('f129ba5e-a16e-4178-924f-6a9c5b8b1589')
-    def test_member_delete(self):
+    def test_HTTP_RR_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('f961105a-9874-4765-b457-3de9f342e226')
+    def test_HTTPS_RR_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('6bca4760-bfb4-4cee-b77f-a77abec3e38e')
+    def test_PROXY_RR_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('f0f5a651-f7f0-40d7-a051-32da07c28252')
+    def test_TCP_RR_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('03424069-302d-4020-996c-0a346a97c847')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_RR_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('87d0eac7-e391-4633-88cb-e691eeeab4fc')
+    def test_HTTP_SI_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('a919aa78-5221-4321-aa26-fcd3432d843c')
+    def test_HTTPS_SI_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('92368eef-d9ce-47d9-b3f2-7624601010a0')
+    def test_PROXY_SI_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('1631c730-f34a-4ae7-91eb-5f5b5052cb55')
+    def test_TCP_SI_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('8e05deee-f385-44d8-a112-2649aeea6006')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SI_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('1c9f9dc5-4ba3-44cd-a840-fd0629abfddd')
+    def test_HTTP_SIP_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTP, pool_protocol=const.HTTP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('93ef7ac1-da00-420d-a367-22e86d968e1c')
+    def test_HTTPS_SIP_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.HTTPS, pool_protocol=const.HTTPS,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('e83b9389-768f-4bcf-a650-17af01243d2b')
+    def test_PROXY_SIP_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.PROXY,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('eca3d41d-21bd-4547-b8b8-8f87867eb4ad')
+    def test_TCP_SIP_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.TCP, pool_protocol=const.TCP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_delete(pool_id)
+
+    @decorators.idempotent_id('23788358-ac5f-46c4-922a-164e6a13fe0d')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=(CONF.load_balancer.provider in const.AMPHORA_PROVIDERS and
+                   CONF.load_balancer.test_with_ipv6))
+    def test_UDP_SIP_member_delete(self):
+        pool_id = self._listener_pool_create(
+            listener_protocol=const.UDP, pool_protocol=const.UDP,
+            algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_delete(pool_id)
+
+    def _test_member_delete(self, pool_id):
         """Tests member create and delete APIs.
 
         * Creates a member.
@@ -981,15 +2933,15 @@
         """
         member_name = data_utils.rand_name("lb_member_member1-delete")
         member_kwargs = {
-            const.POOL_ID: self.pool_id,
+            const.POOL_ID: pool_id,
             const.NAME: member_name,
             const.ADDRESS: '192.0.2.1',
-            const.PROTOCOL_PORT: 83,
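+            # Unique port from the shared member_port counter, as in the
+            # update test above.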
+            const.PROTOCOL_PORT: self.member_port.increment(),
         }
         member = self.mem_member_client.create_member(**member_kwargs)
         self.addClassResourceCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_id,
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(
@@ -1005,7 +2957,7 @@
             self.assertRaises(
                 exceptions.Forbidden,
                 self.os_primary.member_client.delete_member,
-                member[const.ID], pool_id=self.pool_id)
+                member[const.ID], pool_id=pool_id)
 
         # Test that a different user, with the load balancer member role
         # cannot delete this member
@@ -1013,17 +2965,17 @@
             member2_client = self.os_roles_lb_member2.member_client
             self.assertRaises(exceptions.Forbidden,
                               member2_client.delete_member,
-                              member[const.ID], pool_id=self.pool_id)
+                              member[const.ID], pool_id=pool_id)
 
         self.mem_member_client.delete_member(member[const.ID],
-                                             pool_id=self.pool_id)
+                                             pool_id=pool_id)
 
         waiters.wait_for_deleted_status_or_not_found(
             self.mem_member_client.show_member, member[const.ID],
             const.PROVISIONING_STATUS,
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout,
-            pool_id=self.pool_id)
+            pool_id=pool_id)
 
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_pool.py b/octavia_tempest_plugin/tests/api/v2/test_pool.py
index 88e0119..5567fd7 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_pool.py
@@ -12,6 +12,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import testtools
 import time
 from uuid import UUID
 
@@ -40,10 +41,6 @@
         lb_kwargs = {const.PROVIDER: CONF.load_balancer.provider,
                      const.NAME: lb_name}
         cls._setup_lb_network_kwargs(lb_kwargs)
-        cls.protocol = const.HTTP
-        cls.lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not cls.lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = cls.lb_feature_enabled.l4_protocol
 
         lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
         cls.lb_id = lb[const.ID]
@@ -57,35 +54,285 @@
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
 
-        listener_name = data_utils.rand_name("lb_member_listener1_pool")
-        listener_kwargs = {
-            const.NAME: listener_name,
-            const.PROTOCOL: cls.protocol,
-            const.PROTOCOL_PORT: '80',
-            const.LOADBALANCER_ID: cls.lb_id,
-        }
-        listener = cls.mem_listener_client.create_listener(**listener_kwargs)
-        cls.listener_id = listener[const.ID]
-        cls.addClassResourceCleanup(
-            cls.mem_listener_client.cleanup_listener,
-            cls.listener_id,
-            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
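+    # Each test below uses a distinct protocol_port so any listeners
+    # created for these permutations can coexist on the shared LB.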
+    # Pool with Least Connections algorithm
+    @decorators.idempotent_id('29f1a69d-6a0d-4a85-b178-f50f5b4bdfbc')
+    def test_HTTP_LC_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTP, protocol_port=10,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
 
-        waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
-                                cls.lb_id, const.PROVISIONING_STATUS,
-                                const.ACTIVE,
-                                CONF.load_balancer.build_interval,
-                                CONF.load_balancer.build_timeout)
+    @decorators.idempotent_id('5086402a-2339-4238-bddb-d30508e6cc53')
+    def test_HTTP_LC_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTP,
+                               pool_protocol=const.HTTP, protocol_port=11,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
 
+    @decorators.idempotent_id('eb84fabc-68e6-44f7-955d-4919f045cd08')
+    def test_HTTPS_LC_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTPS, protocol_port=12,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('664fc4d3-70d9-41c5-b3f8-c006726062ae')
+    def test_HTTPS_LC_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTPS,
+                               pool_protocol=const.HTTPS, protocol_port=13,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('08e35f78-a85d-48d2-8ac3-14c5e68b64f7')
+    def test_PROXY_LC_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.PROXY, protocol_port=14,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('044f460b-47ec-4e97-96be-c7ab812bfa16')
+    def test_PROXY_LC_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.PROXY, protocol_port=15,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('8f552da6-38f8-44b8-b69b-072cc1e232a6')
+    def test_TCP_LC_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.TCP, protocol_port=16,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('728b974f-ff59-479b-ada5-de280bbaaf02')
+    def test_TCP_LC_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.TCP, protocol_port=17,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('0cb032d9-e092-476e-9aaf-463eea58fc16')
+    def test_UDP_LC_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.UDP, protocol_port=18,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('9bf3bb05-ee36-47f3-b669-78f06a94035d')
+    def test_UDP_LC_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.UDP,
+                               pool_protocol=const.UDP, protocol_port=19,
+                               algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    # Pool with Round Robin algorithm
     @decorators.idempotent_id('7587fe48-87ba-4538-9f03-190911f100ff')
-    def test_pool_create_standalone(self):
-        self._test_pool_create(has_listener=False)
+    def test_HTTP_RR_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTP, protocol_port=20,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
 
     @decorators.idempotent_id('c9c0df79-f07e-428c-ae57-b9d4078eec79')
-    def test_pool_create_with_listener(self):
-        self._test_pool_create(has_listener=True)
+    def test_HTTP_RR_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTP,
+                               pool_protocol=const.HTTP, protocol_port=21,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
 
-    def _test_pool_create(self, has_listener):
+    @decorators.idempotent_id('cc940a8b-b21c-46f5-9976-d2c8dd73b626')
+    def test_HTTPS_RR_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTPS, protocol_port=22,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('10df2793-63b2-42a3-a5d0-9241a9d700a3')
+    def test_HTTPS_RR_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTPS,
+                               pool_protocol=const.HTTPS, protocol_port=23,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('4d6c124e-73ea-4b32-bd1c-3ff7be2c4e55')
+    def test_PROXY_RR_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.PROXY, protocol_port=24,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('60406086-b0a9-4f55-8f64-df161981443c')
+    def test_PROXY_RR_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.PROXY, protocol_port=25,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('41a5f22e-80e8-4d85-bfd6-2726846ed2ce')
+    def test_TCP_RR_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.TCP, protocol_port=26,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('b76432ae-1aa2-4048-b326-1cbda28415ac')
+    def test_TCP_RR_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.TCP, protocol_port=27,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('c09be35d-8a8b-4abd-8752-2cb4d7d7fab2')
+    def test_UDP_RR_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.UDP, protocol_port=28,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('4fb59ed9-5c44-437e-a5f9-bb01b9ba6a72')
+    def test_UDP_RR_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.UDP,
+                               pool_protocol=const.UDP, protocol_port=29,
+                               algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    # Pool with Source IP algorithm
+    @decorators.idempotent_id('a8b1b41c-5c3c-4c17-a2d4-b7c344520e3d')
+    def test_HTTP_SI_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTP, protocol_port=30,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('0136341c-4622-4f65-a59d-b9983331d627')
+    def test_HTTP_SI_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTP,
+                               pool_protocol=const.HTTP, protocol_port=31,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('d69c5e0b-43b5-4afe-a94a-1a4f93e44a93')
+    def test_HTTPS_SI_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTPS, protocol_port=32,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('04cc43f2-9eab-4552-b8c1-cea9e1325696')
+    def test_HTTPS_SI_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTPS,
+                               pool_protocol=const.HTTPS, protocol_port=33,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('cc42779d-71f8-4a7c-8217-02127be344ce')
+    def test_PROXY_SI_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.PROXY, protocol_port=34,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('c845d8d3-30d7-42c3-8943-9a4582c62e2d')
+    def test_PROXY_SI_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.PROXY, protocol_port=35,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('869b4208-5821-44f5-acf6-4f087c4dd79c')
+    def test_TCP_SI_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.TCP, protocol_port=36,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('4b0be593-b2e4-4704-a347-c36dae76aaad')
+    def test_TCP_SI_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.TCP, protocol_port=37,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('47af95cb-685a-48de-9d17-66108cdfd3fa')
+    def test_UDP_SI_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.UDP, protocol_port=38,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('0115ab0c-e8fd-434a-9448-7fba55a8f27d')
+    def test_UDP_SI_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.UDP,
+                               pool_protocol=const.UDP, protocol_port=39,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    # Pool with Source IP Port algorithm
+    @decorators.idempotent_id('265ba978-a528-429c-9ef7-c36373ee2225')
+    def test_HTTP_SIP_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTP, protocol_port=40,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('a07e2ff0-90f3-43d3-a7ec-5ca93b7f29bf')
+    def test_HTTP_SIP_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTP,
+                               pool_protocol=const.HTTP, protocol_port=41,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('7da08af5-e225-46df-b0b4-a5f1834a5377')
+    def test_HTTPS_SIP_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.HTTPS, protocol_port=42,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('3d52a82e-e488-445a-8599-87e9bb7153eb')
+    def test_HTTPS_SIP_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.HTTPS,
+                               pool_protocol=const.HTTPS, protocol_port=43,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('ad4cb862-fa37-4874-99c1-511cdcd86f91')
+    def test_PROXY_SIP_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.PROXY, protocol_port=44,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('bcf76f26-e801-4ab4-b338-b210457d592e')
+    def test_PROXY_SIP_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.PROXY, protocol_port=45,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('42382080-7fd5-46d7-afd7-d47c880f0397')
+    def test_TCP_SIP_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.TCP, protocol_port=46,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('53687dd3-e076-4e93-b917-93c76a160444')
+    def test_TCP_SIP_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.TCP,
+                               pool_protocol=const.TCP, protocol_port=47,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('02d40127-d46e-4aba-8428-96f6deff3554')
+    def test_UDP_SIP_pool_standalone_create(self):
+        self._test_pool_create(listener_protocol=None,
+                               pool_protocol=const.UDP, protocol_port=48,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('5f1acd3e-305d-40d5-81e9-fe6250411d49')
+    def test_UDP_SIP_pool_with_listener_create(self):
+        self._test_pool_create(listener_protocol=const.UDP,
+                               pool_protocol=const.UDP, protocol_port=49,
+                               algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    # Test with session persistence
+    @decorators.idempotent_id('c8b84032-1c20-4d85-9db2-2fe5b9eff37a')
+    def test_HTTP_RR_app_cookie_pool_with_listener_create(self):
+        self._test_pool_create(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=50,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_APP_COOKIE)
+
+    @decorators.idempotent_id('0296cccb-83be-425c-ac6a-828774734d5a')
+    def test_HTTP_RR_http_cookie_pool_with_listener_create(self):
+        self._test_pool_create(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=51,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_HTTP_COOKIE)
+
+    @decorators.idempotent_id('882263e6-d50f-47b4-9083-f76c2b92eef0')
+    def test_HTTP_RR_source_IP_pool_with_listener_create(self):
+        self._test_pool_create(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=52,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    @decorators.idempotent_id('1d5eed30-86bf-4bf4-87d0-22adee3defa1')
+    def test_UDP_RR_source_IP_pool_with_listener_create(self):
+        self._test_pool_create(
+            listener_protocol=const.UDP,
+            pool_protocol=const.UDP, protocol_port=53,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    def _test_pool_create(self, listener_protocol, pool_protocol,
+                          protocol_port, algorithm, session_persistence=None):
         """Tests pool create and basic show APIs.
 
         * Tests that users without the loadbalancer member role cannot
@@ -94,6 +341,34 @@
         * Show pool details.
         * Validate the show reflects the requested values.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
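+        # When a listener protocol is given, create a listener on a unique
+        # port for the pool to attach to, then wait for the load balancer to
+        # return to ACTIVE before creating the pool.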
+        if listener_protocol is not None:
+            listener_name = data_utils.rand_name("lb_member_listener1_pool")
+            listener_kwargs = {
+                const.NAME: listener_name,
+                const.PROTOCOL: listener_protocol,
+                const.PROTOCOL_PORT: protocol_port,
+                const.LOADBALANCER_ID: self.lb_id,
+            }
+            listener = self.mem_listener_client.create_listener(
+                **listener_kwargs)
+            self.addClassResourceCleanup(
+                self.mem_listener_client.cleanup_listener,
+                listener[const.ID],
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+            waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+                                    self.lb_id, const.PROVISIONING_STATUS,
+                                    const.ACTIVE,
+                                    CONF.load_balancer.build_interval,
+                                    CONF.load_balancer.build_timeout)
+
         pool_name = data_utils.rand_name("lb_member_pool1-create")
         pool_description = data_utils.arbitrary_string(size=255)
         pool_sp_cookie_name = 'my_cookie'
@@ -101,8 +376,8 @@
             const.NAME: pool_name,
             const.DESCRIPTION: pool_description,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
         }
 
         if self.mem_lb_client.is_version_supported(self.api_version, '2.5'):
@@ -111,14 +386,22 @@
                 const.TAGS: pool_tags
             })
 
-        if self.lb_feature_enabled.session_persistence_enabled:
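+        # Only APP_COOKIE session persistence takes a cookie name;
+        # HTTP_COOKIE and SOURCE_IP take no extra parameters.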
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_kwargs[const.SESSION_PERSISTENCE] = {
                 const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool_sp_cookie_name,
+                const.COOKIE_NAME: pool_sp_cookie_name
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
             }
 
-        if has_listener:
-            pool_kwargs[const.LISTENER_ID] = self.listener_id
+        if listener_protocol is not None:
+            pool_kwargs[const.LISTENER_ID] = listener[const.ID]
         else:
             pool_kwargs[const.LOADBALANCER_ID] = self.lb_id
 
@@ -130,7 +413,21 @@
                 self.os_primary.pool_client.create_pool,
                 **pool_kwargs)
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since the gates run with not_implemented_is_error,
+        # we must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addClassResourceCleanup(
             self.mem_pool_client.cleanup_pool,
             pool[const.ID],
@@ -147,7 +444,8 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout)
-        if has_listener and not CONF.load_balancer.test_with_noop:
+        if (listener_protocol is not None and
+                not CONF.load_balancer.test_with_noop):
             pool = waiters.wait_for_status(
                 self.mem_pool_client.show_pool,
                 pool[const.ID], const.OPERATING_STATUS,
@@ -162,36 +460,125 @@
         parser.parse(pool[const.UPDATED_AT])
         UUID(pool[const.ID])
         # Operating status for a pool without members will be:
-        if has_listener and not CONF.load_balancer.test_with_noop:
+        if (listener_protocol is not None and
+                not CONF.load_balancer.test_with_noop):
             # ONLINE if it is attached to a listener and is a live test
             self.assertEqual(const.ONLINE, pool[const.OPERATING_STATUS])
         else:
             # OFFLINE if it is just on the LB directly or is in noop mode
             self.assertEqual(const.OFFLINE, pool[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, pool[const.PROTOCOL])
+        self.assertEqual(pool_protocol, pool[const.PROTOCOL])
         self.assertEqual(1, len(pool[const.LOADBALANCERS]))
         self.assertEqual(self.lb_id, pool[const.LOADBALANCERS][0][const.ID])
-        if has_listener:
+        if listener_protocol is not None:
             self.assertEqual(1, len(pool[const.LISTENERS]))
-            self.assertEqual(self.listener_id,
+            self.assertEqual(listener[const.ID],
                              pool[const.LISTENERS][0][const.ID])
         else:
             self.assertEmpty(pool[const.LISTENERS])
-        self.assertEqual(self.lb_algorithm,
-                         pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertEqual(pool_sp_cookie_name,
                              pool[const.SESSION_PERSISTENCE][
                                  const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+
         if self.mem_lb_client.is_version_supported(self.api_version, '2.5'):
             self.assertCountEqual(pool_kwargs[const.TAGS],
                                   pool[const.TAGS])
 
+    @decorators.idempotent_id('4b4c8021-f4dd-4826-b825-7e3dc0beaba4')
+    def test_HTTP_LC_pool_list(self):
+        self._test_pool_list(const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('6959a32e-fb34-4f3e-be68-8880c6450016')
-    def test_pool_list(self):
+    def test_HTTP_RR_pool_list(self):
+        self._test_pool_list(const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('b2cb9879-c1b3-491a-bd20-773bc57625b0')
+    def test_HTTP_SI_pool_list(self):
+        self._test_pool_list(const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('871b5a7f-c8f3-4d05-9533-f9498e2465fa')
+    def test_HTTP_SIP_pool_list(self):
+        self._test_pool_list(const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('8ace3b65-7183-4b55-837d-3e7f438ea079')
+    def test_HTTPS_LC_pool_list(self):
+        self._test_pool_list(const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('7e01fa71-34fd-42e6-9db8-4b2a57cda38d')
+    def test_HTTPS_RR_pool_list(self):
+        self._test_pool_list(const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('25d4b0d2-ab46-40ad-afec-1b0afa88a559')
+    def test_HTTPS_SI_pool_list(self):
+        self._test_pool_list(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('250e6bf6-5017-47c9-ae12-1e64515d3bfd')
+    def test_HTTPS_SIP_pool_list(self):
+        self._test_pool_list(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('f3f3565e-a6b3-4541-9fb3-d9900231771b')
+    def test_PROXY_LC_pool_list(self):
+        self._test_pool_list(const.PROXY, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('fb267dae-b4e3-4858-a85e-72ecb1d91eff')
+    def test_PROXY_RR_pool_list(self):
+        self._test_pool_list(const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('42a3e3e3-ad71-418e-a262-628a213a7b03')
+    def test_PROXY_SI_pool_list(self):
+        self._test_pool_list(const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('69d3f811-6ce8-403e-bae9-745d51cb268a')
+    def test_PROXY_SIP_pool_list(self):
+        self._test_pool_list(const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('f3a74c0c-3083-44a5-9938-a245176babcd')
+    def test_TCP_LC_pool_list(self):
+        self._test_pool_list(const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('1a0b616f-ee77-4ac1-bb5f-300c2a10a7f2')
+    def test_TCP_RR_pool_list(self):
+        self._test_pool_list(const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('ec0fda75-f2d7-4fa6-ba91-c5eb9a7e9874')
+    def test_TCP_SI_pool_list(self):
+        self._test_pool_list(const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('8ca217d0-705f-4a7a-87c2-752bb1ee88f1')
+    def test_TCP_SIP_pool_list(self):
+        self._test_pool_list(const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('ed26899d-a590-46fc-bf70-27c5a9c59cbf')
+    def test_UDP_LC_pool_list(self):
+        self._test_pool_list(const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('6fb6a6e3-ff65-4f2c-8876-2997d3903cfe')
+    def test_UDP_RR_pool_list(self):
+        self._test_pool_list(const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('0bb02800-d7c9-4916-a532-ac1ac7b945d0')
+    def test_UDP_SI_pool_list(self):
+        self._test_pool_list(const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('bdc7df5f-ffdb-48c8-823e-a3b5d76868a0')
+    def test_UDP_SIP_pool_list(self):
+        self._test_pool_list(const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    def _test_pool_list(self, pool_protocol, algorithm):
         """Tests pool list API and field filtering.
 
         * Create a clean loadbalancer.
@@ -205,6 +592,13 @@
         * List the pools filtering to one of the three.
         * List the pools filtered, one field, and sorted.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
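+        # Create a dedicated load balancer so pools created by other tests
+        # do not pollute the list results.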
         lb_name = data_utils.rand_name("lb_member_lb2_pool-list")
         lb = self.mem_lb_client.create_loadbalancer(
             name=lb_name, provider=CONF.load_balancer.provider,
@@ -223,13 +617,12 @@
 
         pool1_name = data_utils.rand_name("lb_member_pool2-list")
         pool1_desc = 'B'
-        pool1_sp_cookie_name = 'my_cookie1'
         pool1_kwargs = {
             const.NAME: pool1_name,
             const.DESCRIPTION: pool1_desc,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: lb_id,
         }
 
@@ -239,13 +632,21 @@
                           "Marketing", "Creativity"]
             pool1_kwargs.update({const.TAGS: pool1_tags})
 
-        if self.lb_feature_enabled.session_persistence_enabled:
-            pool1_kwargs[const.SESSION_PERSISTENCE] = {
-                const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool1_sp_cookie_name,
-            }
-        pool1 = self.mem_pool_client.create_pool(
-            **pool1_kwargs)
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since the gates run with not_implemented_is_error,
+        # we must handle this test case specially.
+        try:
+            pool1 = self.mem_pool_client.create_pool(**pool1_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool,
             pool1[const.ID],
@@ -268,13 +669,12 @@
 
         pool2_name = data_utils.rand_name("lb_member_pool1-list")
         pool2_desc = 'A'
-        pool2_sp_cookie_name = 'my_cookie2'
         pool2_kwargs = {
             const.NAME: pool2_name,
             const.DESCRIPTION: pool2_desc,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: lb_id,
         }
 
@@ -284,11 +684,6 @@
                           "Soft_skills", "Creativity"]
             pool2_kwargs.update({const.TAGS: pool2_tags})
 
-        if self.lb_feature_enabled.session_persistence_enabled:
-            pool2_kwargs[const.SESSION_PERSISTENCE] = {
-                const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool2_sp_cookie_name,
-            }
         pool2 = self.mem_pool_client.create_pool(
             **pool2_kwargs)
         self.addCleanup(
@@ -317,9 +712,8 @@
             const.NAME: pool3_name,
             const.DESCRIPTION: pool3_desc,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
-            # No session persistence, just so there's one test for that
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: lb_id,
         }
 
@@ -471,8 +865,113 @@
             self.assertTrue(not any(["" in pool[const.TAGS]
                                      for pool in list_of_pools]))
 
+    @decorators.idempotent_id('416c72c6-ef63-4e70-b27e-3ed95b93c02d')
+    def test_HTTP_LC_pool_show(self):
+        self._test_pool_show(const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('82568809-bdb9-444b-9790-128d0c328d72')
+    def test_HTTPS_LC_pool_show(self):
+        self._test_pool_show(const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('93e4bc67-ce12-43fb-a50d-97de47c3a63f')
+    def test_PROXY_LC_pool_show(self):
+        self._test_pool_show(const.PROXY, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('ee910c1e-1704-4b41-99c1-0c1f904e577d')
+    def test_TCP_LC_pool_show(self):
+        self._test_pool_show(const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('5f4339f6-0387-44f4-a5f9-e385d44b5ee2')
+    def test_UDP_LC_pool_show(self):
+        self._test_pool_show(const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('b7932438-1aea-4175-a50c-984fee1c0cad')
-    def test_pool_show(self):
+    def test_HTTP_RR_pool_show(self):
+        self._test_pool_show(const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('a1bffe2f-ce20-4d79-a168-bc930de5edcb')
+    def test_HTTPS_RR_pool_show(self):
+        self._test_pool_show(const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('4cf9fa5c-d8e0-4253-8b79-2eb59e066772')
+    def test_PROXY_RR_pool_show(self):
+        self._test_pool_show(const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('e6e91bb3-76b3-4ff8-ad60-d20ac1e64381')
+    def test_TCP_RR_pool_show(self):
+        self._test_pool_show(const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('7c4a1c90-7fc2-42ee-ad78-d7c75b5a56d2')
+    def test_UDP_RR_pool_show(self):
+        self._test_pool_show(const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('af1c1b10-a6ac-4f28-82ba-2c0770903a5c')
+    def test_HTTP_SI_pool_show(self):
+        self._test_pool_show(const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('64434a6d-222e-4056-bcc0-335ebe4f03ee')
+    def test_HTTPS_SI_pool_show(self):
+        self._test_pool_show(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('adbb7f3b-7a37-4a8e-a2b4-c3f827dad0ba')
+    def test_PROXY_SI_pool_show(self):
+        self._test_pool_show(const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('55f62d1a-33b0-4263-84af-672a30ee52bd')
+    def test_TCP_SI_pool_show(self):
+        self._test_pool_show(const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('5b961eee-183d-435c-bdf5-b83ca68c4944')
+    def test_UDP_SI_pool_show(self):
+        self._test_pool_show(const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('59964666-3dfe-4bad-81e0-bc5a4809c10c')
+    def test_HTTP_SIP_pool_show(self):
+        self._test_pool_show(const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('9f8f855c-cd09-4a74-b5f2-c5c13b59422e')
+    def test_HTTPS_SIP_pool_show(self):
+        self._test_pool_show(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('1194bd18-20bc-43e7-b588-9f78a72e0021')
+    def test_PROXY_SIP_pool_show(self):
+        self._test_pool_show(const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('6003cbe8-73a5-416a-9be0-7aa5699dc157')
+    def test_TCP_SIP_pool_show(self):
+        self._test_pool_show(const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('1400529e-3a0c-4bac-b6ed-669fdd723956')
+    def test_UDP_SIP_pool_show(self):
+        self._test_pool_show(const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    # Test with session persistence
+    @decorators.idempotent_id('6fa12ae6-a61a-43d0-85d7-5367811c9c5a')
+    def test_HTTP_RR_app_cookie_pool_show(self):
+        self._test_pool_show(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_APP_COOKIE)
+
+    @decorators.idempotent_id('4a1b6e2c-c216-4589-9ab6-2cd63217f06a')
+    def test_HTTP_RR_http_cookie_pool_show(self):
+        self._test_pool_show(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_HTTP_COOKIE)
+
+    @decorators.idempotent_id('373f1c80-e51e-4260-b8d8-f6aeb512f81c')
+    def test_HTTP_RR_source_IP_pool_show(self):
+        self._test_pool_show(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    @decorators.idempotent_id('bd732c36-bdaa-4591-bf4e-28268874d22c')
+    def test_UDP_RR_source_IP_pool_show(self):
+        self._test_pool_show(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    def _test_pool_show(self, pool_protocol, algorithm,
+                        session_persistence=None):
         """Tests pool show API.
 
         * Create a fully populated pool.
@@ -480,6 +979,13 @@
         * Validate the show reflects the requested values.
         * Validates that other accounts cannot see the pool.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1-show")
         pool_description = data_utils.arbitrary_string(size=255)
         pool_sp_cookie_name = 'my_cookie'
@@ -487,17 +993,40 @@
             const.NAME: pool_name,
             const.DESCRIPTION: pool_description,
             const.ADMIN_STATE_UP: True,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
-        if self.lb_feature_enabled.session_persistence_enabled:
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_kwargs[const.SESSION_PERSISTENCE] = {
                 const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool_sp_cookie_name,
+                const.COOKIE_NAME: pool_sp_cookie_name
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
             }
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since the gates run with not_implemented_is_error,
+        # we must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addClassResourceCleanup(
             self.mem_pool_client.cleanup_pool,
             pool[const.ID],
@@ -523,19 +1052,27 @@
         UUID(pool[const.ID])
         # Operating status for pools will always be offline without members
         self.assertEqual(const.OFFLINE, pool[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, pool[const.PROTOCOL])
+        self.assertEqual(pool_protocol, pool[const.PROTOCOL])
         self.assertEqual(1, len(pool[const.LOADBALANCERS]))
         self.assertEqual(self.lb_id, pool[const.LOADBALANCERS][0][const.ID])
         self.assertEmpty(pool[const.LISTENERS])
-        self.assertEqual(self.lb_algorithm,
-                         pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertEqual(pool_sp_cookie_name,
                              pool[const.SESSION_PERSISTENCE][
                                  const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
 
         # Test that a user with lb_admin role can see the pool
         if CONF.load_balancer.RBAC_test_type == const.ADVANCED:
@@ -565,8 +1102,116 @@
                 self.os_primary.pool_client.show_pool,
                 pool[const.ID])
 
+    @decorators.idempotent_id('d73755fe-ba3a-4248-9543-8e167a5aa7f4')
+    def test_HTTP_LC_pool_update(self):
+        self._test_pool_update(const.HTTP,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('ca641999-7817-4f8f-a58b-2ccd7a5dca97')
+    def test_HTTPS_LC_pool_update(self):
+        self._test_pool_update(const.HTTPS,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('67a2cbab-f4fc-41c8-93e8-97ddba39c1ab')
+    def test_PROXY_LC_pool_update(self):
+        self._test_pool_update(const.PROXY,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('c9f1ed23-f6d4-4d44-9d22-bdc1fbe5854d')
+    def test_TCP_LC_pool_update(self):
+        self._test_pool_update(const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('8e6b8802-c4a7-43eb-a4e8-9f6bf7899a7d')
+    def test_UDP_LC_pool_update(self):
+        self._test_pool_update(const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('7bd0a6bf-57b4-46a6-83ef-f9991896658a')
-    def test_pool_update(self):
+    def test_HTTP_RR_pool_update(self):
+        self._test_pool_update(const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('ac097c2e-4f79-4714-8de4-517598d37919')
+    def test_HTTPS_RR_pool_update(self):
+        self._test_pool_update(const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('4392bc21-c18e-4e25-bb7e-2c9e3777d784')
+    def test_PROXY_RR_pool_update(self):
+        self._test_pool_update(const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('f6a5970d-2f27-419b-a0ee-7a420ee7b396')
+    def test_TCP_RR_pool_update(self):
+        self._test_pool_update(const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('a1cded67-9fd6-4155-8761-ce165d518b47')
+    def test_UDP_RR_pool_update(self):
+        self._test_pool_update(const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('42a742d2-ef9c-47fd-8585-5588bb867431')
+    def test_HTTP_SI_pool_update(self):
+        self._test_pool_update(const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('3b4e6462-4e0c-4c05-bc30-d6f86f67bb60')
+    def test_HTTPS_SI_pool_update(self):
+        self._test_pool_update(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('d2cb7a0a-8268-46bc-a519-08474c42c4ca')
+    def test_PROXY_SI_pool_update(self):
+        self._test_pool_update(const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('84e63663-0bf7-45bc-a4d9-b3bbd664fd8c')
+    def test_TCP_SI_pool_update(self):
+        self._test_pool_update(const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('cdb230b9-996a-4933-a7a2-a7b09465c18c')
+    def test_UDP_SI_pool_update(self):
+        self._test_pool_update(const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('7c770af5-782e-453c-bd2e-41ec90b37907')
+    def test_HTTP_SIP_pool_update(self):
+        self._test_pool_update(const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('72cee49d-36d5-4b54-8883-8fe7be9fd0f0')
+    def test_HTTPS_SIP_pool_update(self):
+        self._test_pool_update(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('f269293c-b6fa-4fe2-82a6-57fc8ef89260')
+    def test_PROXY_SIP_pool_update(self):
+        self._test_pool_update(const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('f5445a86-247f-4075-8b71-e4e5415d0bed')
+    def test_TCP_SIP_pool_update(self):
+        self._test_pool_update(const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('d9e1aeee-21c3-4b0f-9685-834768597607')
+    def test_UDP_SIP_pool_update(self):
+        self._test_pool_update(const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    # Test with session persistence
+    @decorators.idempotent_id('8677a512-77e1-4af3-96f7-8a3d66725e08')
+    def test_HTTP_RR_app_cookie_pool_update(self):
+        self._test_pool_update(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_APP_COOKIE)
+
+    @decorators.idempotent_id('4d3b3a4a-a652-4dca-9a49-b14471ce5309')
+    def test_HTTP_RR_http_cookie_pool_update(self):
+        self._test_pool_update(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_HTTP_COOKIE)
+
+    @decorators.idempotent_id('2e7bbf67-ed32-4a3c-b5ae-1aff8b07bacc')
+    def test_HTTP_RR_source_IP_pool_update(self):
+        self._test_pool_update(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    @decorators.idempotent_id('28b90650-a612-4b10-981f-d3dd6a366e4f')
+    def test_UDP_RR_source_IP_pool_update(self):
+        self._test_pool_update(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    def _test_pool_update(self, pool_protocol, algorithm,
+                          session_persistence=None):
         """Tests pool update and show APIs.
 
         * Create a fully populated pool.
@@ -577,6 +1222,13 @@
         * Show pool details.
         * Validate the show reflects the updated values.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1-update")
         pool_description = data_utils.arbitrary_string(size=255)
         pool_sp_cookie_name = 'my_cookie'
@@ -584,8 +1236,8 @@
             const.NAME: pool_name,
             const.DESCRIPTION: pool_description,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
 
@@ -595,12 +1247,35 @@
                 const.TAGS: pool_tags
             })
 
-        if self.lb_feature_enabled.session_persistence_enabled:
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_kwargs[const.SESSION_PERSISTENCE] = {
                 const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool_sp_cookie_name,
+                const.COOKIE_NAME: pool_sp_cookie_name
             }
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
+            }
+
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since the gates run with not_implemented_is_error,
+        # we must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addClassResourceCleanup(
             self.mem_pool_client.cleanup_pool,
             pool[const.ID],
@@ -626,19 +1301,27 @@
         UUID(pool[const.ID])
         # Operating status for pools will always be offline without members
         self.assertEqual(const.OFFLINE, pool[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, pool[const.PROTOCOL])
+        self.assertEqual(pool_protocol, pool[const.PROTOCOL])
         self.assertEqual(1, len(pool[const.LOADBALANCERS]))
         self.assertEqual(self.lb_id, pool[const.LOADBALANCERS][0][const.ID])
         self.assertEmpty(pool[const.LISTENERS])
-        self.assertEqual(self.lb_algorithm,
-                         pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertEqual(pool_sp_cookie_name,
                              pool[const.SESSION_PERSISTENCE][
                                  const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
 
         # Test that a user, without the load balancer member role, cannot
         # use this command
@@ -677,11 +1360,13 @@
         new_name = data_utils.rand_name("lb_member_pool1-UPDATED")
         new_description = data_utils.arbitrary_string(size=255,
                                                       base_text='new')
+        # We have to keep the same algorithm, as not all
+        # provider drivers support more than one pool algorithm.
         pool_update_kwargs = {
             const.NAME: new_name,
             const.DESCRIPTION: new_description,
             const.ADMIN_STATE_UP: True,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.LB_ALGORITHM: algorithm,
         }
 
         if self.mem_lb_client.is_version_supported(self.api_version, '2.5'):
@@ -690,10 +1375,22 @@
                 const.TAGS: new_tags
             })
 
-        if self.lb_feature_enabled.session_persistence_enabled:
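+        # Swap the cookie-based session persistence types on update to
+        # verify the persistence type can be changed in place.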
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_update_kwargs[const.SESSION_PERSISTENCE] = {
-                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE,
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
             }
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_update_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
+                const.COOKIE_NAME: pool_sp_cookie_name
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            # Some protocols only support source IP session persistence
+            # so set this to the same.
+            pool_update_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
+            }
+
         pool = self.mem_pool_client.update_pool(
             pool[const.ID], **pool_update_kwargs)
 
@@ -712,21 +1409,34 @@
         self.assertEqual(new_name, pool[const.NAME])
         self.assertEqual(new_description, pool[const.DESCRIPTION])
         self.assertTrue(pool[const.ADMIN_STATE_UP])
-        self.assertEqual(self.lb_algorithm,
-                         pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertIsNone(
                 pool[const.SESSION_PERSISTENCE].get(const.COOKIE_NAME))
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+            self.assertEqual(pool_sp_cookie_name,
+                             pool[const.SESSION_PERSISTENCE][
+                                 const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+            self.assertIsNone(
+                pool[const.SESSION_PERSISTENCE].get(const.COOKIE_NAME))
 
         if self.mem_lb_client.is_version_supported(self.api_version, '2.5'):
             self.assertCountEqual(pool_update_kwargs[const.TAGS],
                                   pool[const.TAGS])
 
         # Also test removing a Session Persistence
-        if self.lb_feature_enabled.session_persistence_enabled:
+        if session_persistence is not None:
             pool_update_kwargs = {
                 const.SESSION_PERSISTENCE: None,
             }
@@ -744,11 +1454,119 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout)
-        if self.lb_feature_enabled.session_persistence_enabled:
+        if session_persistence is not None:
             self.assertIsNone(pool.get(const.SESSION_PERSISTENCE))
 
+    @decorators.idempotent_id('008088c8-696e-47ba-bc18-75827fe5956b')
+    def test_HTTP_LC_pool_delete(self):
+        self._test_pool_delete(const.HTTP,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('c4b2dad0-378f-4f85-a1f3-597de609b0f3')
+    def test_HTTPS_LC_pool_delete(self):
+        self._test_pool_delete(const.HTTPS,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('d906c63e-6090-422c-9627-e5a971e1665c')
+    def test_PROXY_LC_pool_delete(self):
+        self._test_pool_delete(const.PROXY,
+                               const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('3bfd2d96-813b-48af-86e0-97361873a68a')
+    def test_TCP_LC_pool_delete(self):
+        self._test_pool_delete(const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('ea58bede-1934-480b-86fc-665b872fc946')
+    def test_UDP_LC_pool_delete(self):
+        self._test_pool_delete(const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('35ed3800-7a4a-47a6-9b94-c1033fff1112')
-    def test_pool_delete(self):
+    def test_HTTP_RR_pool_delete(self):
+        self._test_pool_delete(const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('c305126b-3ead-4ea8-a886-77d355c0d4a2')
+    def test_HTTPS_RR_pool_delete(self):
+        self._test_pool_delete(const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('729be91c-82c5-4b4e-9feb-08a1c786488b')
+    def test_PROXY_RR_pool_delete(self):
+        self._test_pool_delete(const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('a455dea2-19ce-435c-90ae-e143fe84245e')
+    def test_TCP_RR_pool_delete(self):
+        self._test_pool_delete(const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('63559694-6b38-4bad-9f10-3675131b28c0')
+    def test_UDP_RR_pool_delete(self):
+        self._test_pool_delete(const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('06bec76b-8fbf-4be8-9350-92590ac48606')
+    def test_HTTP_SI_pool_delete(self):
+        self._test_pool_delete(const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('4b21149e-64f8-4e5f-8f71-020abbd0d0eb')
+    def test_HTTPS_SI_pool_delete(self):
+        self._test_pool_delete(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('277fba8f-d72b-47f0-9723-5e013f53fb7a')
+    def test_PROXY_SI_pool_delete(self):
+        self._test_pool_delete(const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('8411295f-aec0-40ab-a25d-a4677c711d98')
+    def test_TCP_SI_pool_delete(self):
+        self._test_pool_delete(const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('33322f21-12cc-4f2e-b406-0c11b05a1c6e')
+    def test_UDP_SI_pool_delete(self):
+        self._test_pool_delete(const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('9b40351e-1140-4b98-974c-46bd1a19763d')
+    def test_HTTP_SIP_pool_delete(self):
+        self._test_pool_delete(const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('391a4ed7-be97-4231-8198-5c2802bc6e30')
+    def test_HTTPS_SIP_pool_delete(self):
+        self._test_pool_delete(const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('1f6b6b9c-96c6-420b-bc51-8568c081a1ee')
+    def test_PROXY_SIP_pool_delete(self):
+        self._test_pool_delete(const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('fe54f133-865a-4613-9cf0-0469c780f53e')
+    def test_TCP_SIP_pool_delete(self):
+        self._test_pool_delete(const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('7cac0520-c7ea-49cb-8b38-0e309af2ea53')
+    def test_UDP_SIP_pool_delete(self):
+        self._test_pool_delete(const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    # Test with session persistence
+    @decorators.idempotent_id('f9aa5a8c-4e2a-4029-8581-2980f1d111cf')
+    def test_HTTP_RR_app_cookie_pool_delete(self):
+        self._test_pool_delete(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_APP_COOKIE)
+
+    @decorators.idempotent_id('12a31fb5-85fc-4ec8-9475-079dc06f358b')
+    def test_HTTP_RR_http_cookie_pool_delete(self):
+        self._test_pool_delete(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_HTTP_COOKIE)
+
+    @decorators.idempotent_id('07528fe6-12a6-4fca-8819-9980e9d3db84')
+    def test_HTTP_RR_source_IP_pool_delete(self):
+        self._test_pool_delete(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    @decorators.idempotent_id('cc69c0d0-9191-4faf-a154-e33df880f44e')
+    def test_UDP_RR_source_IP_pool_delete(self):
+        self._test_pool_delete(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    def _test_pool_delete(self, pool_protocol, algorithm,
+                          session_persistence=None):
         """Tests pool create and delete APIs.
 
         * Creates a pool.
@@ -756,20 +1574,51 @@
         * Deletes the pool.
         * Validates the pool is in the DELETED state.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
         pool_name = data_utils.rand_name("lb_member_pool1-delete")
         pool_sp_cookie_name = 'my_cookie'
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: self.lb_id,
         }
-        if self.lb_feature_enabled.session_persistence_enabled:
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_kwargs[const.SESSION_PERSISTENCE] = {
                 const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool_sp_cookie_name,
+                const.COOKIE_NAME: pool_sp_cookie_name
             }
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
+            }
+
+        # This is a special case, as the reference driver does not support
+        # SOURCE_IP_PORT. Since the gates run with not_implemented_is_error,
+        # we must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addClassResourceCleanup(
             self.mem_pool_client.cleanup_pool,
             pool[const.ID],
diff --git a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
index 82b7019..9e32b80 100644
--- a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
+++ b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
@@ -42,11 +42,6 @@
     @classmethod
     def skip_checks(cls):
         super(TLSWithBarbicanTest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.l7_protocol_enabled:
-            raise cls.skipException(
-                '[loadbalancer_feature_enabled] "l7_protocol_enabled" is '
-                'False in the tempest configuration. TLS tests will be '
-                'skipped.')
         if not CONF.loadbalancer_feature_enabled.terminated_tls_enabled:
             raise cls.skipException(
                 '[loadbalancer-feature-enabled] "terminated_tls_enabled" is '
@@ -309,8 +304,8 @@
 
         # Test HTTPS listener load balancing.
         # Note: certificate validation tests will follow this test
-        self.check_members_balanced(self.lb_vip_address, protocol='https',
-                                    verify=False, protocol_port=443)
+        self.check_members_balanced(self.lb_vip_address, protocol=const.HTTPS,
+                                    HTTPS_verify=False, protocol_port=443)
 
         def _verify_cb(connection, x509, errno, errdepth, retcode):
             """Callback for certificate validation."""
@@ -395,8 +390,8 @@
 
         # Test HTTPS listener load balancing.
         # Note: certificate validation tests will follow this test
-        self.check_members_balanced(self.lb_vip_address, protocol='https',
-                                    verify=False, protocol_port=443)
+        self.check_members_balanced(self.lb_vip_address, protocol=const.HTTPS,
+                                    HTTPS_verify=False, protocol_port=443)
 
         # Test HTTP listener load balancing.
         self.check_members_balanced(self.lb_vip_address)
@@ -430,8 +425,8 @@
 
         # Test HTTPS listener load balancing.
         # Note: certificate validation tests will follow this test
-        self.check_members_balanced(self.lb_vip_address, protocol='https',
-                                    verify=False, protocol_port=443)
+        self.check_members_balanced(self.lb_vip_address, protocol=const.HTTPS,
+                                    HTTPS_verify=False, protocol_port=443)
 
         def _verify_server_cb(connection, x509, errno, errdepth, retcode):
             return _verify_cb(connection, x509, errno, errdepth, retcode,
@@ -563,8 +558,8 @@
 
         # Test HTTPS listener load balancing.
         # Note: certificate validation tests will follow this test
-        self.check_members_balanced(self.lb_vip_address, protocol='https',
-                                    verify=False, protocol_port=443)
+        self.check_members_balanced(self.lb_vip_address, protocol=const.HTTPS,
+                                    HTTPS_verify=False, protocol_port=443)
 
         listener2_name = data_utils.rand_name("lb_member_listener2-tls-sni")
         listener2_kwargs = {
@@ -591,8 +586,8 @@
 
         # Test HTTPS listener load balancing.
         # Note: certificate validation tests will follow this test
-        self.check_members_balanced(self.lb_vip_address, protocol='https',
-                                    verify=False, protocol_port=8443)
+        self.check_members_balanced(self.lb_vip_address, protocol=const.HTTPS,
+                                    HTTPS_verify=False, protocol_port=8443)
 
         def _verify_server_cb(connection, x509, errno, errdepth, retcode):
             return _verify_cb(connection, x509, errno, errdepth, retcode,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
index fa39c01..e67aed6 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
@@ -12,12 +12,14 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
 from uuid import UUID
 
 from dateutil import parser
+import testtools
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
+from tempest.lib import exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -29,12 +31,6 @@
 class HealthMonitorScenarioTest(test_base.LoadBalancerBaseTest):
 
     @classmethod
-    def skip_checks(cls):
-        super(HealthMonitorScenarioTest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.health_monitor_enabled:
-            raise cls.skipException('Health Monitors not supported')
-
-    @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
         super(HealthMonitorScenarioTest, cls).resource_setup()
@@ -57,28 +53,199 @@
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
 
-        pool_name = data_utils.rand_name("lb_member_pool1_hm")
-        pool_kwargs = {
-            const.NAME: pool_name,
-            const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: cls.lb_algorithm,
-            const.LOADBALANCER_ID: cls.lb_id,
-        }
-        pool = cls.mem_pool_client.create_pool(**pool_kwargs)
-        cls.pool_id = pool[const.ID]
-        cls.addClassResourceCleanup(
-            cls.mem_pool_client.cleanup_pool,
-            cls.pool_id,
-            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+    @decorators.idempotent_id('4c2058f9-b8e2-4a5b-a2f3-3bd58a29f63b')
+    def test_LC_HTTP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTP)
 
-        waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
-                                cls.lb_id, const.PROVISIONING_STATUS,
-                                const.ACTIVE,
-                                CONF.load_balancer.build_interval,
-                                CONF.load_balancer.build_timeout)
+    @decorators.idempotent_id('08681eac-e907-4f71-8799-4b8fdf23914a')
+    def test_LC_HTTPS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('74611ffb-45f8-4cf5-a28c-7cc37879a27b')
+    def test_LC_PING_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('cacec696-10f4-430d-bc9e-2c5f235a3324')
+    def test_LC_TCP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('6becafb2-1e15-4977-bb29-b08f5728d028')
+    def test_LC_TLS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('fe43ee90-093d-4175-837e-92f803958ef1')
+    def test_LC_UDP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS,
+            const.HEALTH_MONITOR_UDP_CONNECT)
 
     @decorators.idempotent_id('a51e09aa-6e44-4c67-a9e4-df70d0e08f96')
-    def test_healthmonitor_CRUD(self):
+    def test_RR_HTTP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('fef9eabc-9d1e-4ad2-ae3e-05afc8c84c48')
+    def test_RR_HTTPS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('de01b73d-dba0-4426-9e20-9be3a34cfc44')
+    def test_RR_PING_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('141a121a-8918-4f9c-a070-eaf8ec29008d')
+    def test_RR_TCP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('de80d87a-5479-41c6-8c6b-518cc64ec62d')
+    def test_RR_TLS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('265d7359-f0a5-4083-92a8-07cb1787fe36')
+    def test_RR_UDP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.UDP, const.LB_ALGORITHM_ROUND_ROBIN,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('20a2905f-2b53-4395-9a7f-1ded67ef4408')
+    def test_SI_HTTP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTP)
+
+    @decorators.idempotent_id('8a8cc776-b68f-4761-9bf9-cae566cdc155')
+    def test_SI_HTTPS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.HTTPS, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_HTTPS)
+
+    @decorators.idempotent_id('296a445c-5cc8-47a7-ae26-8d548f9712c3')
+    def test_SI_PING_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_PING)
+
+    @decorators.idempotent_id('94be34b1-4dc6-492b-a777-0587626a785f')
+    def test_SI_TCP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TCP)
+
+    @decorators.idempotent_id('0de0e021-fd3c-4f7c-b959-67d758394fd2')
+    def test_SI_TLS_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.TCP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_TLS_HELLO)
+
+    @decorators.idempotent_id('3c79750a-aba6-4838-acbe-bc937ccf2118')
+    def test_SI_UDP_healthmonitor_CRUD(self):
+        self._test_healthmonitor_CRUD(
+            const.UDP, const.LB_ALGORITHM_SOURCE_IP,
+            const.HEALTH_MONITOR_UDP_CONNECT)
+
+    @decorators.idempotent_id('d5e0d1b6-7cce-4592-abce-0ac6bee18818')
+    def test_SIP_HTTP_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_HTTP)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('e188daac-6db9-4dc2-8ecb-b47932e1984a')
+    def test_SIP_HTTPS_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_HTTPS)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('f9458ffd-5af7-402b-9c15-c061bf2eb9ba')
+    def test_SIP_PING_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_PING)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('b4cbe603-0a14-4778-b38c-f330053c86b6')
+    def test_SIP_TCP_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_TCP)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('57714d4c-d584-4345-9ceb-becc3ae37b7f')
+    def test_SIP_TLS_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_TLS_HELLO)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('cc4abf84-361b-409b-b859-9a860d539deb')
+    def test_SIP_UDP_healthmonitor_CRUD(self):
+        try:
+            self._test_healthmonitor_CRUD(
+                const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT,
+                const.HEALTH_MONITOR_UDP_CONNECT)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    def _test_healthmonitor_CRUD(self, pool_protocol, pool_algorithm, hm_type):
         """Tests healthmonitor create, read, update, delete, and member status
 
         * Create a fully populated healthmonitor.
@@ -86,21 +253,48 @@
         * Update the healthmonitor.
         * Delete the healthmonitor.
         """
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
+        pool_name = data_utils.rand_name("lb_member_pool1_hm")
+        pool_kwargs = {
+            const.NAME: pool_name,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: pool_algorithm,
+            const.LOADBALANCER_ID: self.lb_id,
+        }
+        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        self.addClassResourceCleanup(
+            self.mem_pool_client.cleanup_pool, pool[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+        waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+                                self.lb_id, const.PROVISIONING_STATUS,
+                                const.ACTIVE,
+                                CONF.load_balancer.build_interval,
+                                CONF.load_balancer.build_timeout)
+
         # Healthmonitor create
         hm_name = data_utils.rand_name("lb_member_hm1-CRUD")
+        delay = 3 if hm_type == const.HEALTH_MONITOR_UDP_CONNECT else 2
         hm_kwargs = {
-            const.POOL_ID: self.pool_id,
+            const.POOL_ID: pool[const.ID],
             const.NAME: hm_name,
-            const.TYPE: const.HEALTH_MONITOR_HTTP,
-            const.DELAY: 2,
+            const.TYPE: hm_type,
+            const.DELAY: delay,
             const.TIMEOUT: 2,
             const.MAX_RETRIES: 2,
             const.MAX_RETRIES_DOWN: 2,
-            const.HTTP_METHOD: const.GET,
-            const.URL_PATH: '/',
-            const.EXPECTED_CODES: '200',
             const.ADMIN_STATE_UP: True,
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_kwargs.update({const.HTTP_METHOD: const.GET,
+                              const.URL_PATH: '/',
+                              const.EXPECTED_CODES: '200'})
 
         hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
         self.addCleanup(
@@ -126,8 +320,10 @@
 
         equal_items = [const.NAME, const.TYPE, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         for item in equal_items:
             self.assertEqual(hm_kwargs[item], hm[item])
@@ -140,11 +336,13 @@
             const.TIMEOUT: hm_kwargs[const.TIMEOUT] + 1,
             const.MAX_RETRIES: hm_kwargs[const.MAX_RETRIES] + 1,
             const.MAX_RETRIES_DOWN: hm_kwargs[const.MAX_RETRIES_DOWN] + 1,
-            const.HTTP_METHOD: const.POST,
-            const.URL_PATH: '/test',
-            const.EXPECTED_CODES: '201,202',
             const.ADMIN_STATE_UP: not hm_kwargs[const.ADMIN_STATE_UP],
         }
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            hm_update_kwargs.update({const.HTTP_METHOD: const.POST,
+                                     const.URL_PATH: '/test',
+                                     const.EXPECTED_CODES: '201,202'})
+
         hm = self.mem_healthmonitor_client.update_healthmonitor(
             hm[const.ID], **hm_update_kwargs)
 
@@ -163,8 +361,10 @@
         # Test changed items
         equal_items = [const.NAME, const.DELAY, const.TIMEOUT,
                        const.MAX_RETRIES, const.MAX_RETRIES_DOWN,
-                       const.HTTP_METHOD, const.URL_PATH, const.EXPECTED_CODES,
                        const.ADMIN_STATE_UP]
+        if hm_type == const.HEALTH_MONITOR_HTTP:
+            equal_items = equal_items + [const.HTTP_METHOD, const.URL_PATH,
+                                         const.EXPECTED_CODES]
 
         for item in equal_items:
             self.assertEqual(hm_update_kwargs[item], hm[item])
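The hunks above make the health monitor kwargs type-dependent: HTTP-only options (method, URL path, expected codes) are attached only for HTTP monitors, and UDP-CONNECT monitors get a longer delay. A condensed sketch of that assembly, with literal strings standing in for the plugin constants:

def build_hm_kwargs(pool_id, name, hm_type):
    # The tests above use a delay of 3 for UDP-CONNECT and 2 otherwise.
    delay = 3 if hm_type == 'UDP-CONNECT' else 2
    kwargs = {
        'pool_id': pool_id,
        'name': name,
        'type': hm_type,
        'delay': delay,
        'timeout': 2,
        'max_retries': 2,
        'max_retries_down': 2,
        'admin_state_up': True,
    }
    # Only HTTP monitors accept method/path/status-code options.
    if hm_type == 'HTTP':
        kwargs.update({'http_method': 'GET',
                       'url_path': '/',
                       'expected_codes': '200'})
    return kwargs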
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
index fbfe930..06d10dc 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
@@ -11,12 +11,12 @@
 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 #    License for the specific language governing permissions and limitations
 #    under the License.
-
-import requests
+import testtools
 
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
+from tempest.lib import exceptions as tempest_exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -69,19 +69,17 @@
         cls.listener_ids = {}
         cls.pool_ids = {}
 
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
-
         # Don't use the same ports for HTTP/l4_protocol and UDP because some
         # releases (<=train) don't support it
         cls._listener_pool_create(const.HTTP, 80)
 
+        cls._listener_pool_create(const.TCP, 81)
+
         cls._listener_pool_create(const.UDP, 8080)
 
     @classmethod
-    def _listener_pool_create(cls, protocol, protocol_port):
+    def _listener_pool_create(cls, protocol, protocol_port,
+                              algorithm=const.LB_ALGORITHM_ROUND_ROBIN):
         if (protocol == const.UDP and
                 not cls.mem_listener_client.is_version_supported(
                     cls.api_version, '2.1')):
@@ -103,8 +101,7 @@
             # haproxy process and use haproxy>=1.8:
             const.CONNECTION_LIMIT: 200,
         }
-        listener = cls.mem_listener_client.create_listener(
-            **listener_kwargs)
+        listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.listener_ids[protocol] = listener[const.ID]
         cls.addClassResourceCleanup(
             cls.mem_listener_client.cleanup_listener,
@@ -121,7 +118,7 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.LB_ALGORITHM: algorithm,
             const.LISTENER_ID: cls.listener_ids[protocol],
         }
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
@@ -137,8 +134,8 @@
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
 
-    def _test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(self, protocol,
-                                                       protocol_port):
+    def _test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(
+            self, protocol, protocol_port, persistent=True):
         """Tests traffic through a loadbalancer with IPv4 and IPv6 members.
 
         * Set up members on a loadbalancer.
@@ -156,8 +153,7 @@
         if self.lb_member_1_subnet:
             member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
 
-        member1 = self.mem_member_client.create_member(
-            **member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
             member1[const.ID], pool_id=self.pool_ids[protocol],
@@ -181,8 +177,7 @@
             member2_kwargs[const.SUBNET_ID] = (
                 self.lb_member_2_ipv6_subnet[const.ID])
 
-        member2 = self.mem_member_client.create_member(
-            **member2_kwargs)
+        member2 = self.mem_member_client.create_member(**member2_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
             member2[const.ID], pool_id=self.pool_ids[protocol],
@@ -196,11 +191,16 @@
         # Send some traffic
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
-                                    protocol=protocol)
+                                    protocol=protocol, persistent=persistent)
 
     @decorators.idempotent_id('219ac17d-c5c1-4e7e-a9d5-0764d7ce7746')
-    def test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(self):
-        self._test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(self.protocol, 80)
+    def test_http_ipv6_vip_mixed_ipv4_ipv6_members_traffic(self):
+        self._test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(const.HTTP, 80)
+
+    @decorators.idempotent_id('a4e8d5d1-03d5-4252-9300-e89b9b2bdafc')
+    def test_tcp_ipv6_vip_mixed_ipv4_ipv6_members_traffic(self):
+        self._test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(const.TCP, 81,
+                                                            persistent=False)
 
     @decorators.idempotent_id('c468434d-bc84-4bfa-825f-d4761daa0d76')
     # Skipping test for amphora driver until "UDP load balancers cannot mix
@@ -218,7 +218,8 @@
 
         self._test_ipv6_vip_mixed_ipv4_ipv6_members_traffic(const.UDP, 8080)
 
-    def _test_ipv6_vip_ipv6_members_traffic(self, protocol, protocol_port):
+    def _test_ipv6_vip_ipv6_members_traffic(self, protocol, protocol_port,
+                                            persistent=True):
         """Tests traffic through a loadbalancer with IPv6 members.
 
         * Set up members on a loadbalancer.
@@ -238,8 +239,7 @@
             member1_kwargs[const.SUBNET_ID] = (
                 self.lb_member_1_ipv6_subnet[const.ID])
 
-        member1 = self.mem_member_client.create_member(
-            **member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
             member1[const.ID], pool_id=self.pool_ids[protocol],
@@ -263,8 +263,7 @@
             member2_kwargs[const.SUBNET_ID] = (
                 self.lb_member_2_ipv6_subnet[const.ID])
 
-        member2 = self.mem_member_client.create_member(
-            **member2_kwargs)
+        member2 = self.mem_member_client.create_member(**member2_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
             member2[const.ID], pool_id=self.pool_ids[protocol],
@@ -278,11 +277,16 @@
         # Send some traffic
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
-                                    protocol=protocol)
+                                    protocol=protocol, persistent=persistent)
 
     @decorators.idempotent_id('dd75f41a-5b29-47ad-963d-3434f1056ca3')
-    def test_ipv6_vip_ipv6_members_traffic(self):
-        self._test_ipv6_vip_ipv6_members_traffic(self.protocol, 80)
+    def test_http_ipv6_vip_ipv6_members_traffic(self):
+        self._test_ipv6_vip_ipv6_members_traffic(const.HTTP, 80)
+
+    @decorators.idempotent_id('9bb93619-14cb-45d9-ad60-2f80c201486a')
+    def test_tcp_ipv6_vip_ipv6_members_traffic(self):
+        self._test_ipv6_vip_ipv6_members_traffic(const.TCP, 81,
+                                                 persistent=False)
 
     @decorators.idempotent_id('26317013-a9b5-4a00-a993-d4c55b764e40')
     def test_ipv6_vip_ipv6_members_udp_traffic(self):
@@ -293,8 +297,68 @@
 
         self._test_ipv6_vip_ipv6_members_traffic(const.UDP, 8080)
 
+    @decorators.idempotent_id('9bead31b-0760-4c8f-b70a-f758fc5edd6a')
+    def test_ipv6_http_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 90, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('843a13f7-e00f-4151-8817-b5395eb69b52')
+    def test_ipv6_tcp_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 91, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('cc0d55b1-87e8-4a87-bf50-66299947a469')
+    def test_ipv6_udp_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 92, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('84b23f68-4bc3-49e5-8372-60c25fe69613')
-    def test_listener_with_allowed_cidrs(self):
+    def test_ipv6_http_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 93, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('52c07510-5755-44a3-9231-64c9cbb4bbd4')
+    def test_ipv6_tcp_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 94, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('df0417d9-dc72-4bb5-b3ce-1e2558a3c4a9')
+    def test_ipv6_udp_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 95, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('d1256195-3d85-4ffd-bda3-1c0ab78b8ce1')
+    def test_ipv6_http_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 96, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('bf8504b6-b95a-4f8a-9032-ab432db46eec')
+    def test_ipv6_tcp_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 97, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('ce75bf28-5288-4821-a603-460e602de8b9')
+    def test_ipv6_udp_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 98, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('c11768f1-19b4-48cc-99a5-0737379b1957')
+    def test_ipv6_http_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 99, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('fcfe2ab1-2c36-4793-a926-1fec589a9a2a')
+    def test_ipv6_tcp_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 100, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('80f31bc1-819e-4d9e-8820-bf3e28600540')
+    def test_ipv6_udp_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 101, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    def _test_listener_with_allowed_cidrs(self, protocol, protocol_port,
+                                          algorithm):
         """Tests traffic through a loadbalancer with allowed CIDRs set.
 
         * Set up listener with allowed CIDRS (allow all) on a loadbalancer.
@@ -312,11 +376,10 @@
                                      'or newer.')
 
         listener_name = data_utils.rand_name("lb_member_listener2_cidrs")
-        listener_port = 8080
         listener_kwargs = {
             const.NAME: listener_name,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: listener_port,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.ALLOWED_CIDRS: ['::/0']
         }
@@ -336,11 +399,25 @@
         pool_name = data_utils.rand_name("lb_member_pool3_cidrs")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LISTENER_ID: listener_id,
         }
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case, as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except tempest_exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         pool_id = pool[const.ID]
         self.addCleanup(
             self.mem_pool_client.cleanup_pool,
@@ -359,7 +436,7 @@
             const.POOL_ID: pool_id,
             const.NAME: member1_name,
             const.ADMIN_STATE_UP: True,
-            const.ADDRESS: self.webserver1_ip,
+            const.ADDRESS: self.webserver1_ipv6,
             const.PROTOCOL_PORT: 80,
         }
         if self.lb_member_1_subnet:
@@ -383,7 +460,7 @@
             const.POOL_ID: pool_id,
             const.NAME: member2_name,
             const.ADMIN_STATE_UP: True,
-            const.ADDRESS: self.webserver2_ip,
+            const.ADDRESS: self.webserver2_ipv6,
             const.PROTOCOL_PORT: 80,
         }
         if self.lb_member_2_subnet:
@@ -401,8 +478,13 @@
             CONF.load_balancer.check_timeout)
 
         # Send some traffic
+        members = 2
+        if algorithm == const.LB_ALGORITHM_SOURCE_IP:
+            members = 1
         self.check_members_balanced(
-            self.lb_vip_address, protocol_port=listener_port)
+            self.lb_vip_address, protocol=protocol,
+            protocol_port=protocol_port, persistent=False,
+            traffic_member_count=members)
 
         listener_kwargs = {
             const.LISTENER_ID: listener_id,
@@ -415,21 +497,27 @@
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
 
-        url_for_vip = 'http://[{}]:{}/'.format(self.lb_vip_address,
-                                               listener_port)
-
         # NOTE: Before we start with the consistent response check, we must
         # wait until Neutron completes the SG update.
         # See https://bugs.launchpad.net/neutron/+bug/1866353.
-        def expect_conn_error(url):
+        def expect_timeout_error(address, protocol, protocol_port):
             try:
-                requests.Session().get(url)
-            except requests.exceptions.ConnectionError:
+                self.make_request(address, protocol=protocol,
+                                  protocol_port=protocol_port)
+            except tempest_exceptions.TimeoutException:
                 return True
             return False
 
-        waiters.wait_until_true(expect_conn_error, url=url_for_vip)
+        waiters.wait_until_true(
+            expect_timeout_error, address=self.lb_vip_address,
+            protocol=protocol, protocol_port=protocol_port)
 
         # Assert that the server is consistently unavailable
+        if protocol == const.UDP:
+            url_for_vip = 'udp://[{}]:{}/'.format(self.lb_vip_address,
+                                                  protocol_port)
+        else:
+            url_for_vip = 'http://[{}]:{}/'.format(self.lb_vip_address,
+                                                   protocol_port)
         self.assertConsistentResponse(
-            (None, None), url_for_vip, repeat=3, conn_error=True)
+            (None, None), url_for_vip, repeat=3, expect_connection_error=True)
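The allowed-CIDR test now polls make_request until requests time out, which confirms the Neutron security-group update has landed before asserting the VIP stays unreachable. A rough sketch of a wait_until_true-style poller with the semantics the call sites above assume (call the predicate with keyword arguments until it returns True or a deadline passes); the real waiters module may handle timeouts differently:

import time


def wait_until_true(func, timeout=60, sleep=1, **kwargs):
    # Poll the predicate until it reports success or the deadline passes.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if func(**kwargs):
            return
        time.sleep(sleep)
    raise TimeoutError('Condition not met within {0}s'.format(timeout))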
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
index 46ce984..9e09f35 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
@@ -29,15 +29,6 @@
 class L7PolicyScenarioTest(test_base.LoadBalancerBaseTest):
 
     @classmethod
-    def skip_checks(cls):
-        super(L7PolicyScenarioTest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.l7_protocol_enabled:
-            raise cls.skipException(
-                '[loadbalancer-feature-enabled] '
-                '"l7_protocol_enabled" is set to False in the Tempest '
-                'configuration. L7 Scenario tests will be skipped.')
-
-    @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
         super(L7PolicyScenarioTest, cls).resource_setup()
@@ -84,7 +75,7 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: const.HTTP,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
             const.LOADBALANCER_ID: cls.lb_id,
         }
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
index 1c147db..114ea3e 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
@@ -29,15 +29,6 @@
 class L7RuleScenarioTest(test_base.LoadBalancerBaseTest):
 
     @classmethod
-    def skip_checks(cls):
-        super(L7RuleScenarioTest, cls).skip_checks()
-        if not CONF.loadbalancer_feature_enabled.l7_protocol_enabled:
-            raise cls.skipException(
-                '[loadbalancer-feature-enabled] '
-                '"l7_protocol_enabled" is set to False in the Tempest '
-                'configuration. L7 Scenario tests will be skipped.')
-
-    @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
         super(L7RuleScenarioTest, cls).resource_setup()
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
index c056bd0..29aa187 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
@@ -12,6 +12,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
 from uuid import UUID
 
 from dateutil import parser
+import testtools
@@ -19,6 +20,7 @@
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
+from tempest.lib import exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -51,23 +53,30 @@
                                 const.ACTIVE,
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
+
+        cls.allowed_cidrs = ['192.0.1.0/24']
+        if CONF.load_balancer.test_with_ipv6:
+            cls.allowed_cidrs = ['2001:db8:a0b:12f0::/64']
+
+    def _create_pools(cls, protocol, algorithm):
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            cls.mem_listener_client.is_version_supported(
+                cls.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
 
         pool1_name = data_utils.rand_name("lb_member_pool1_listener")
         pool1_kwargs = {
             const.NAME: pool1_name,
-            const.PROTOCOL: cls.protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.PROTOCOL: protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: cls.lb_id,
         }
         pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
-        cls.pool1_id = pool1[const.ID]
+        pool1_id = pool1[const.ID]
         cls.addClassResourceCleanup(
-            cls.mem_pool_client.cleanup_pool,
-            cls.pool1_id,
+            cls.mem_pool_client.cleanup_pool, pool1_id,
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -79,15 +88,14 @@
         pool2_name = data_utils.rand_name("lb_member_pool2_listener")
         pool2_kwargs = {
             const.NAME: pool2_name,
-            const.PROTOCOL: cls.protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
+            const.PROTOCOL: protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LOADBALANCER_ID: cls.lb_id,
         }
         pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
-        cls.pool2_id = pool2[const.ID]
+        pool2_id = pool2[const.ID]
         cls.addClassResourceCleanup(
-            cls.mem_pool_client.cleanup_pool,
-            cls.pool2_id,
+            cls.mem_pool_client.cleanup_pool, pool2_id,
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -95,13 +103,128 @@
                                 const.ACTIVE,
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
+        return pool1_id, pool2_id
 
-        cls.allowed_cidrs = ['192.0.1.0/24']
-        if CONF.load_balancer.test_with_ipv6:
-            cls.allowed_cidrs = ['2001:db8:a0b:12f0::/64']
+    # Note: TERMINATED_HTTPS listeners are covered in a different
+    #       tempest scenario suite due to the need for key-manager services
+
+    @decorators.idempotent_id('ecdd65b0-cf8f-48ee-972b-2f09425472f1')
+    def test_http_least_connections_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.HTTP,
+                                          const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_listener_CRUD(const.HTTP, pool1, pool2)
+
+    @decorators.idempotent_id('0681b2ac-8301-4e6c-bf29-b35244864af3')
+    def test_tcp_least_connections_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.TCP,
+                                          const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_listener_CRUD(const.TCP, pool1, pool2)
+
+    @decorators.idempotent_id('27a2ba7d-6147-46e4-886a-47c1ba63bf89')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_least_connections_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.UDP,
+                                          const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_listener_CRUD(const.UDP, pool1, pool2)
 
     @decorators.idempotent_id('4a874014-b7d1-49a4-ac9a-2400b3434700')
-    def test_listener_CRUD(self):
+    def test_http_round_robin_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.HTTP,
+                                          const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_listener_CRUD(const.HTTP, pool1, pool2)
+
+    @decorators.idempotent_id('2b888812-d916-44f0-b620-8d83dbb45975')
+    def test_tcp_round_robin_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.TCP,
+                                          const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_listener_CRUD(const.TCP, pool1, pool2)
+
+    @decorators.idempotent_id('dd913f74-c6a6-4998-9bed-095babb9cb47')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_round_robin_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.UDP,
+                                          const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_listener_CRUD(const.UDP, pool1, pool2)
+
+    @decorators.idempotent_id('b2ae8604-7a4f-477c-9658-fac27734671a')
+    def test_http_source_ip_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.HTTP,
+                                          const.LB_ALGORITHM_SOURCE_IP)
+        self._test_listener_CRUD(const.HTTP, pool1, pool2)
+
+    @decorators.idempotent_id('0ad3fdee-e8c2-4c44-9690-b8a838fbc7a5')
+    def test_tcp_source_ip_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.TCP,
+                                          const.LB_ALGORITHM_SOURCE_IP)
+        self._test_listener_CRUD(const.TCP, pool1, pool2)
+
+    @decorators.idempotent_id('7830aba8-12ca-40d9-9d9b-a63f7a43b287')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_source_ip_listener_CRUD(self):
+        pool1, pool2 = self._create_pools(const.UDP,
+                                          const.LB_ALGORITHM_SOURCE_IP)
+        self._test_listener_CRUD(const.UDP, pool1, pool2)
+
+    @decorators.idempotent_id('807a421e-5e99-4556-b0eb-512d39b25eac')
+    def test_http_source_ip_port_listener_CRUD(self):
+        try:
+            pool1, pool2 = self._create_pools(
+                const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_listener_CRUD(const.HTTP, pool1, pool2)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('6211f8ad-622d-404d-b199-8c2eb55ab340')
+    def test_tcp_source_ip_port_listener_CRUD(self):
+        try:
+            pool1, pool2 = self._create_pools(
+                const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_listener_CRUD(const.TCP, pool1, pool2)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @decorators.idempotent_id('3f9a2de9-5012-437d-a907-a25e1f68ccfb')
+    # Skipping due to a status update bug in the amphora driver.
+    @decorators.skip_because(
+        bug='2007979',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_udp_source_ip_port_listener_CRUD(self):
+        try:
+            pool1, pool2 = self._create_pools(
+                const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_listener_CRUD(const.UDP, pool1, pool2)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    def _test_listener_CRUD(self, protocol, pool1_id, pool2_id):
         """Tests listener create, read, update, delete
 
         * Create a fully populated listener.
@@ -117,19 +240,23 @@
             const.NAME: listener_name,
             const.DESCRIPTION: listener_description,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
+            const.PROTOCOL: protocol,
             const.PROTOCOL_PORT: 80,
             const.LOADBALANCER_ID: self.lb_id,
             const.CONNECTION_LIMIT: 200,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "true",
-                const.X_FORWARDED_PORT: "true"
-            },
-            const.DEFAULT_POOL_ID: self.pool1_id,
+            const.DEFAULT_POOL_ID: pool1_id,
             # TODO(rm_work): need to finish the rest of this stuff
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+
+        if protocol in [const.HTTP, const.TERMINATED_HTTPS]:
+            listener_kwargs.update({
+                const.INSERT_HEADERS: {
+                    const.X_FORWARDED_FOR: "true",
+                    const.X_FORWARDED_PORT: "true"
+                },
+            })
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             listener_kwargs.update({
@@ -168,15 +295,16 @@
         UUID(listener[const.ID])
         # Operating status will be OFFLINE while admin_state_up = False
         self.assertEqual(const.OFFLINE, listener[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, listener[const.PROTOCOL])
+        self.assertEqual(protocol, listener[const.PROTOCOL])
         self.assertEqual(80, listener[const.PROTOCOL_PORT])
         self.assertEqual(200, listener[const.CONNECTION_LIMIT])
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertTrue(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
-        self.assertEqual(self.pool1_id, listener[const.DEFAULT_POOL_ID])
+        if protocol in [const.HTTP, const.TERMINATED_HTTPS]:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertTrue(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+        self.assertEqual(pool1_id, listener[const.DEFAULT_POOL_ID])
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             self.assertEqual(1000, listener[const.TIMEOUT_CLIENT_DATA])
@@ -196,15 +324,18 @@
             const.DESCRIPTION: new_description,
             const.ADMIN_STATE_UP: True,
             const.CONNECTION_LIMIT: 400,
-            const.INSERT_HEADERS: {
-                const.X_FORWARDED_FOR: "false",
-                const.X_FORWARDED_PORT: "false"
-            },
-            const.DEFAULT_POOL_ID: self.pool2_id,
+            const.DEFAULT_POOL_ID: pool2_id,
             # TODO(rm_work): need to finish the rest of this stuff
             # const.DEFAULT_TLS_CONTAINER_REF: '',
             # const.SNI_CONTAINER_REFS: [],
         }
+        if protocol in [const.HTTP, const.TERMINATED_HTTPS]:
+            listener_update_kwargs.update({
+                const.INSERT_HEADERS: {
+                    const.X_FORWARDED_FOR: "false",
+                    const.X_FORWARDED_PORT: "false"
+                },
+            })
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             listener_update_kwargs.update({
@@ -251,15 +382,16 @@
             self.assertEqual(const.OFFLINE, listener[const.OPERATING_STATUS])
         else:
             self.assertEqual(const.ONLINE, listener[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, listener[const.PROTOCOL])
+        self.assertEqual(protocol, listener[const.PROTOCOL])
         self.assertEqual(80, listener[const.PROTOCOL_PORT])
         self.assertEqual(400, listener[const.CONNECTION_LIMIT])
-        insert_headers = listener[const.INSERT_HEADERS]
-        self.assertFalse(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_FOR]))
-        self.assertFalse(
-            strutils.bool_from_string(insert_headers[const.X_FORWARDED_PORT]))
-        self.assertEqual(self.pool2_id, listener[const.DEFAULT_POOL_ID])
+        if protocol in [const.HTTP, const.TERMINATED_HTTPS]:
+            insert_headers = listener[const.INSERT_HEADERS]
+            self.assertFalse(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_FOR]))
+            self.assertFalse(strutils.bool_from_string(
+                insert_headers[const.X_FORWARDED_PORT]))
+        self.assertEqual(pool2_id, listener[const.DEFAULT_POOL_ID])
         if self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             self.assertEqual(2000, listener[const.TIMEOUT_CLIENT_DATA])
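Header insertion is now gated on the listener protocol, since the X-Forwarded-* headers only apply to L7 listeners. A small sketch of that gate as a helper; the function name is illustrative, and the literal strings mirror the constants used above:

HTTP = 'HTTP'
TERMINATED_HTTPS = 'TERMINATED_HTTPS'


def with_insert_headers(listener_kwargs, protocol, enabled=True):
    # Only L7 protocols support X-Forwarded-* header insertion.
    if protocol in (HTTP, TERMINATED_HTTPS):
        flag = "true" if enabled else "false"
        listener_kwargs['insert_headers'] = {'X-Forwarded-For': flag,
                                             'X-Forwarded-Port': flag}
    return listener_kwargs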
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_load_balancer.py b/octavia_tempest_plugin/tests/scenario/v2/test_load_balancer.py
index 0aee398..6c52f84 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_load_balancer.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_load_balancer.py
@@ -22,7 +22,6 @@
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
-from tempest.lib import exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -77,7 +76,7 @@
                     cls.lb_admin_flavor_client.cleanup_a_flavor,
                     cls.flavor[const.ID])
                 cls.flavor_id = cls.flavor[const.ID]
-            except exceptions.NotImplemented:
+            except testtools.TestCase.skipException:
                 LOG.debug("Provider driver %s doesn't support flavors.",
                           CONF.load_balancer.provider)
                 cls.flavor_profile = None
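The flavor setup above now treats a skipException from the flavor helpers as "this provider has no flavor support" and continues without one instead of failing the class. A sketch of that fallback under the same assumption (a create_flavor callable that raises testtools.TestCase.skipException on unsupported providers; names here are illustrative):

import logging

import testtools

LOG = logging.getLogger(__name__)


def maybe_create_flavor(create_flavor, provider):
    # Flavors are optional; fall back to the provider default on skip.
    try:
        return create_flavor()
    except testtools.TestCase.skipException:
        LOG.debug("Provider driver %s doesn't support flavors.", provider)
        return None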
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_member.py b/octavia_tempest_plugin/tests/scenario/v2/test_member.py
index 003b8b4..153cfd5 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_member.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_member.py
@@ -18,7 +18,9 @@
 from dateutil import parser
 from tempest import config
 from tempest.lib.common.utils import data_utils
+from tempest.lib.common.utils import misc
 from tempest.lib import decorators
+from tempest.lib import exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -27,6 +29,17 @@
 CONF = config.CONF
 
 
+# Member port numbers need to be unique on the shared pools, so generate them
+@misc.singleton
+class MemberPort(object):
+
+    current_port = 8000
+
+    def increment(self):
+        self.current_port += 1
+        return self.current_port
+
+
 class MemberScenarioTest(test_base.LoadBalancerBaseTest):
 
     member_address = '2001:db8:0:0:0:0:0:1'
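The MemberPort singleton added above gives the whole process one shared counter, so member ports on the shared pools never collide across test permutations. An illustrative use, assuming no caller has incremented the counter yet:

    ports = MemberPort()
    ports.increment()           # returns 8001
    MemberPort().increment()    # returns 8002; same instance, same counter

Because misc.singleton memoizes the instance, constructing MemberPort anywhere returns the same object.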
@@ -43,6 +56,10 @@
         cls._setup_lb_network_kwargs(lb_kwargs,
                                      ip_version=4)
 
+        cls.current_listener_port = 8000
+        cls.listener_pool_cache = {}
+        cls.member_port = MemberPort()
+
         lb = cls.mem_lb_client.create_loadbalancer(**lb_kwargs)
         cls.lb_id = lb[const.ID]
         cls.addClassResourceCleanup(
@@ -55,45 +72,43 @@
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
 
-        # Per protocol listeners and pools IDs
-        cls.listener_ids = {}
-        cls.pool_ids = {}
-
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
-
-        # Don't use same ports for HTTP/l4_protocol and UDP since some previous
-        # releases (<=train) don't support it
-        cls._listener_pool_create(cls.protocol, 80)
-
-        cls._listener_pool_create(const.UDP, 8080)
-
     @classmethod
-    def _listener_pool_create(cls, protocol, protocol_port):
+    def _listener_pool_create(cls, listener_protocol, pool_protocol,
+                              algorithm):
         """Setup resources needed by the tests."""
-
-        if (protocol == const.UDP and
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            cls.mem_listener_client.is_version_supported(
+                cls.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+        if (listener_protocol == const.UDP and
                 not cls.mem_listener_client.is_version_supported(
                     cls.api_version, '2.1')):
-            return
+            raise cls.skipException('UDP listener support is only available '
+                                    'in Octavia API version 2.1 or newer')
+
+        # Cache listener/pool combinations we have already created, as
+        # they can be reused for member test permutations.
+        listener_pool_key = listener_protocol + pool_protocol + algorithm
+        pool_id = cls.listener_pool_cache.get(listener_pool_key, None)
+        if pool_id is not None:
+            return pool_id
 
         listener_name = data_utils.rand_name("lb_member_listener1_member")
         listener_kwargs = {
             const.NAME: listener_name,
-            const.PROTOCOL: protocol,
-            const.PROTOCOL_PORT: protocol_port,
+            const.PROTOCOL: listener_protocol,
+            const.PROTOCOL_PORT: cls.current_listener_port,
             const.LOADBALANCER_ID: cls.lb_id,
             # For branches that don't support multiple listeners in single
             # haproxy process and use haproxy>=1.8:
             const.CONNECTION_LIMIT: 200,
         }
+        cls.current_listener_port += 1
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
-        cls.listener_ids[protocol] = listener[const.ID]
         cls.addClassResourceCleanup(
-            cls.mem_listener_client.cleanup_listener,
-            cls.listener_ids[protocol],
+            cls.mem_listener_client.cleanup_listener, listener[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -105,15 +120,24 @@
         pool_name = data_utils.rand_name("lb_member_pool1_member")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
-            const.LISTENER_ID: cls.listener_ids[protocol],
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
+            const.LISTENER_ID: listener[const.ID],
         }
-        pool = cls.mem_pool_client.create_pool(**pool_kwargs)
-        cls.pool_ids[protocol] = pool[const.ID]
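+        # The reference driver does not support SOURCE_IP_PORT and runs with
+        # not_implemented_is_error, so convert that error into a skip rather
+        # than a failure for this one algorithm.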
+        try:
+            pool = cls.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         cls.addClassResourceCleanup(
-            cls.mem_pool_client.cleanup_pool,
-            cls.pool_ids[protocol],
+            cls.mem_pool_client.cleanup_pool, pool[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -121,9 +145,250 @@
                                 const.ACTIVE,
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
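+        # Remember this combination so later tests reuse the existing pool
+        # instead of creating a duplicate listener and pool.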
+        cls.listener_pool_cache[listener_pool_key] = pool[const.ID]
+        return pool[const.ID]
+
+    @decorators.idempotent_id('33abafca-ce57-479e-8480-843ef412d6a6')
+    def test_HTTP_LC_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('eab4eb32-b26f-4fe1-a606-1574b5b6182c')
+    def test_HTTP_LC_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('80355701-bc68-4cba-a9b3-4f35fc192b6a')
+    def test_HTTPS_LC_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('6f8fce94-b2aa-4497-b80f-74293d977d25')
+    def test_HTTPS_LC_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('0e45c423-db43-4fee-8442-d9daabe6b2aa')
+    def test_PROXY_LC_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('3ea2aad1-5650-4ec6-8394-501de33cce70')
+    def test_PROXY_LC_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('9b2e7e2d-776b-419c-9717-ab4fef9cd5ca')
+    def test_TCP_LC_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('06b95367-dc81-41e5-9a53-981833fb2979')
+    def test_TCP_LC_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('c70bd8c6-0f6a-4ee7-840f-a3355aefd471')
+    def test_UDP_LC_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('0b4ec248-c6a0-4d29-b77e-189453ec0535')
+    def test_UDP_LC_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
 
     @decorators.idempotent_id('15c8c3e3-569c-4029-95df-a9f72049e267')
-    def test_member_CRUD(self):
+    def test_HTTP_RR_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('a0f02494-ffb3-47be-8670-f56c0df9ec94')
+    def test_HTTP_RR_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('57aee0db-3295-42b7-a7d3-aae942a6cb41')
+    def test_HTTPS_RR_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('6c3e5bd7-4573-4f6d-ac64-31b238c9ea51')
+    def test_HTTPS_RR_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('e0ad1fa0-1fdb-472d-9d69-8968631c9239')
+    def test_PROXY_RR_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('534fbc38-1c70-4c67-8f89-74a6905b1c98')
+    def test_PROXY_RR_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('c4c72e4b-5abe-41df-9f1d-6a8a27c75a80')
+    def test_TCP_RR_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('673425e0-2a57-4c92-a416-7b4e0824708f')
+    def test_TCP_RR_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('f08c9efc-b69c-4c0f-a731-74ec8c17fc91')
+    def test_UDP_RR_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('94829e1e-506e-4f3c-ab04-4e338787ccfd')
+    def test_UDP_RR_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('07d1e571-d12c-4e04-90d1-8f4f42610df3')
+    def test_HTTP_SI_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('3910a7ec-63c5-4152-9fe1-ce21d3e1cdca')
+    def test_HTTP_SI_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('32b0b541-29dc-464b-91c1-115413539de7')
+    def test_HTTPS_SI_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('d59ea523-8dac-4e19-8df4-a7076a17296c')
+    def test_HTTPS_SI_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('12348506-1cfc-4d62-9cc2-d380776a9154')
+    def test_PROXY_SI_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('5d3879a6-d103-4800-bca4-1ef18ecbee68')
+    def test_PROXY_SI_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('efb158e2-de75-4d8b-8566-a0fa5fd75173')
+    def test_TCP_SI_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('4f1661e5-1dff-4910-9ecd-96327ea3e873')
+    def test_TCP_SI_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('0984583b-daaf-4509-bf1f-ff3acf33836b')
+    def test_UDP_SI_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('16b84495-e8f8-4e7b-b242-43a6e00fb8ad')
+    def test_UDP_SI_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('ab8f46fe-0c84-4755-a9a2-80cc1fbdea18')
+    def test_HTTP_SIP_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('7470bea5-9ea0-4e04-a82f-a0bed202b97d')
+    def test_HTTP_SIP_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('e59e9a7d-b6e7-43e9-b9d5-0717f113d769')
+    def test_HTTPS_SIP_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('761e1acd-3f4c-4e02-89e1-f89adfe2e3f9')
+    def test_HTTPS_SIP_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('ba7b0c73-df44-4a1a-a610-a107daabc36d')
+    def test_PROXY_SIP_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('ad43bc3f-2664-42c4-999f-9763facb8d15')
+    def test_PROXY_SIP_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('3341d05c-c199-496f-ac40-6248818ce831')
+    def test_TCP_SIP_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('5872f1de-1a33-4c20-bc02-7d058e3c3b55')
+    def test_TCP_SIP_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    @decorators.idempotent_id('9550835b-c9ef-44e3-8087-151c25a95168')
+    def test_UDP_SIP_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id)
+
+    @decorators.idempotent_id('5f40b080-0f2c-4791-a509-da7cfe9eace4')
+    def test_UDP_SIP_alt_monitor_member_crud(self):
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_member_CRUD(pool_id, alternate_monitoring=True)
+
+    def _test_member_CRUD(self, pool_id, alternate_monitoring=False):
         """Tests member create, read, update, delete
 
         * Create a fully populated member.
@@ -137,13 +402,15 @@
         member_kwargs = {
             const.NAME: member_name,
             const.ADMIN_STATE_UP: True,
-            const.POOL_ID: self.pool_ids[self.protocol],
+            const.POOL_ID: pool_id,
             const.ADDRESS: '192.0.2.1',
             const.PROTOCOL_PORT: 80,
             const.WEIGHT: 50,
-            const.MONITOR_ADDRESS: '192.0.2.2',
-            const.MONITOR_PORT: 8080,
         }
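+        # Optionally configure a distinct health monitor address and port on
+        # the member to exercise the alternate-monitoring code path.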
+        if alternate_monitoring:
+            member_kwargs[const.MONITOR_ADDRESS] = '192.0.2.2'
+            member_kwargs[const.MONITOR_PORT] = 8080
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             member_kwargs.update({
@@ -153,14 +420,11 @@
         if self.lb_member_vip_subnet:
             member_kwargs[const.SUBNET_ID] = self.lb_member_vip_subnet[
                 const.ID]
-        hm_enabled = CONF.loadbalancer_feature_enabled.health_monitor_enabled
-        if not hm_enabled:
-            del member_kwargs[const.MONITOR_ADDRESS]
-            del member_kwargs[const.MONITOR_PORT]
+
         member = self.mem_member_client.create_member(**member_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_ids[self.protocol],
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(
@@ -174,7 +438,7 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[self.protocol])
+            pool_id=pool_id)
 
         parser.parse(member[const.CREATED_AT])
         parser.parse(member[const.UPDATED_AT])
@@ -189,12 +453,13 @@
             const.NO_MONITOR,
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout,
-            pool_id=self.pool_ids[self.protocol])
+            pool_id=pool_id)
 
         equal_items = [const.NAME, const.ADMIN_STATE_UP, const.ADDRESS,
                        const.PROTOCOL_PORT, const.WEIGHT]
-        if hm_enabled:
+        if alternate_monitoring:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
+
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
             equal_items.append(const.BACKUP)
@@ -221,7 +486,7 @@
                 const.BACKUP: not member[const.BACKUP],
             })
 
-        if hm_enabled:
+        if alternate_monitoring:
             member_update_kwargs[const.MONITOR_ADDRESS] = '192.0.2.3'
             member_update_kwargs[const.MONITOR_PORT] = member[
                 const.MONITOR_PORT] + 1
@@ -239,11 +504,11 @@
             const.ACTIVE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[self.protocol])
+            pool_id=pool_id)
 
         # Test changed items
         equal_items = [const.NAME, const.ADMIN_STATE_UP, const.WEIGHT]
-        if hm_enabled:
+        if alternate_monitoring:
             equal_items += [const.MONITOR_ADDRESS, const.MONITOR_PORT]
         if self.mem_member_client.is_version_supported(
                 self.api_version, '2.1'):
@@ -271,14 +536,14 @@
             CONF.load_balancer.check_timeout)
         self.mem_member_client.delete_member(
             member[const.ID],
-            pool_id=self.pool_ids[self.protocol])
+            pool_id=pool_id)
 
         waiters.wait_for_deleted_status_or_not_found(
             self.mem_member_client.show_member, member[const.ID],
             const.PROVISIONING_STATUS,
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout,
-            pool_id=self.pool_ids[self.protocol])
+            pool_id=pool_id)
 
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer,
@@ -287,12 +552,12 @@
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout)
 
-    def _test_mixed_member_create(self, protocol):
+    def _test_mixed_member_create(self, pool_id):
         member_name = data_utils.rand_name("lb_member_member1-create")
         member_kwargs = {
             const.NAME: member_name,
             const.ADMIN_STATE_UP: True,
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.ADDRESS: self.member_address,
             const.PROTOCOL_PORT: 80,
             const.WEIGHT: 50,
@@ -306,7 +571,7 @@
             **member_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member[const.ID], pool_id=self.pool_ids[protocol],
+            member[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -314,7 +579,43 @@
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout)
 
-    @decorators.idempotent_id('0623aa1f-753d-44e7-afa1-017d274eace7')
+    @decorators.idempotent_id('f9bc8ef1-cf21-41e5-819d-7561173e5286')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTP_LC_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('e63c89a7-30a3-4eff-8ff5-dd62a5ecec0f')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTPS_LC_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('efaa9ed0-c261-4184-9693-0020965606a8')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_PROXY_LC_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('f4ac056c-2cb8-457f-b1b1-9b49226f9b9f')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_TCP_LC_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('90e22b80-d52b-4af2-9c4d-9be44eed9575')
     @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
                           'IPv6 testing is disabled')
     # Skipping test for amphora driver until "UDP load balancers cannot mix
@@ -324,19 +625,180 @@
         bug='2003329',
         bug_type='storyboard',
         condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
-    def test_mixed_udp_member_create(self):
+    def test_mixed_UDP_LC_member_create(self):
         """Test the member creation with mixed IP protocol members/VIP."""
-
         if not self.mem_listener_client.is_version_supported(
                 self.api_version, '2.1'):
             raise self.skipException('UDP listener support is only available '
                                      'in Octavia API version 2.1 or newer')
-
-        self._test_mixed_member_create(const.UDP)
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_mixed_member_create(pool_id)
 
     @decorators.idempotent_id('b8afb91d-9b85-4569-85c7-03453df8990b')
     @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
                           'IPv6 testing is disabled')
-    def test_mixed_member_create(self):
+    def test_mixed_HTTP_RR_member_create(self):
         """Test the member creation with mixed IP protocol members/VIP."""
-        self._test_mixed_member_create(self.protocol)
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('a64dc345-4afe-4a2c-8a6a-178dd5a94670')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTPS_RR_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('909aebf2-f9e4-4b96-943e-c02b8a415cd2')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_PROXY_RR_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('407ff3d4-f0a2-4d27-be69-3f2ec039a6a0')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_TCP_RR_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('e74b28cf-ab1a-423b-a1c5-d940e3c0a5ab')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_mixed_UDP_RR_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_ROUND_ROBIN)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('cc7f9272-84a6-436c-a529-171b67a45b62')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTP_SI_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('704a10ed-d52d-4c75-9445-9ef98f7f540f')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTPS_SI_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('4c516b5b-eb7b-4a4c-9a73-fba823332e25')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_PROXY_SI_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('61973bc8-8bc4-4aec-bf57-b37583887544')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_TCP_SI_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('ddab1836-ba9f-42e5-9630-1572d4a63501')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_mixed_UDP_SI_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('b3dc557a-88ec-4bc6-84fd-c3aaab5d5920')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTP_SIP_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTP, const.HTTP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('d6f3908d-470a-4939-b407-c6d6324c06b6')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_HTTPS_SIP_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.HTTPS, const.HTTPS, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('ab745620-bf92-49e1-ac35-e42f266a7612')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_PROXY_SIP_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.PROXY, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('c7ffbd6e-5d9f-45e8-a5d0-2d26ea6b0ed0')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    def test_mixed_TCP_SIP_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        pool_id = self._listener_pool_create(
+            const.TCP, const.TCP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_mixed_member_create(pool_id)
+
+    @decorators.idempotent_id('aa6b282c-d1c2-4a39-b085-33c224d4faff')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'IPv6 testing is disabled')
+    # Skipping test for amphora driver until "UDP load balancers cannot mix
+    # protocol versions" (https://storyboard.openstack.org/#!/story/2003329) is
+    # fixed
+    @decorators.skip_because(
+        bug='2003329',
+        bug_type='storyboard',
+        condition=CONF.load_balancer.provider in const.AMPHORA_PROVIDERS)
+    def test_mixed_UDP_SIP_member_create(self):
+        """Test the member creation with mixed IP protocol members/VIP."""
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        pool_id = self._listener_pool_create(
+            const.UDP, const.UDP, const.LB_ALGORITHM_SOURCE_IP_PORT)
+        self._test_mixed_member_create(pool_id)
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
index 720e80a..fd938aa 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
@@ -12,12 +12,14 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
 from uuid import UUID
 
 from dateutil import parser
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
+from tempest.lib import exceptions
+import testtools
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
@@ -50,40 +52,286 @@
                                 const.ACTIVE,
                                 CONF.load_balancer.lb_build_interval,
                                 CONF.load_balancer.lb_build_timeout)
-        cls.protocol = const.HTTP
-        cls.lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not cls.lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = cls.lb_feature_enabled.l4_protocol
 
-        listener_name = data_utils.rand_name("lb_member_listener1_pool")
-        listener_kwargs = {
-            const.NAME: listener_name,
-            const.PROTOCOL: cls.protocol,
-            const.PROTOCOL_PORT: '80',
-            const.LOADBALANCER_ID: cls.lb_id,
-        }
-        listener = cls.mem_listener_client.create_listener(**listener_kwargs)
-        cls.listener_id = listener[const.ID]
-        cls.addClassResourceCleanup(
-            cls.mem_listener_client.cleanup_listener,
-            cls.listener_id,
-            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
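+    # Note: each test below uses a distinct protocol_port so the listeners
+    # created for all of the permutations can coexist on the shared
+    # load balancer.
+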
+    # Pool with Least Connections algorithm
+    @decorators.idempotent_id('f30bd185-ca13-45c1-8a2f-f4179e7f0c3a')
+    def test_HTTP_LC_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTP, protocol_port=10,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
 
-        waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
-                                cls.lb_id, const.PROVISIONING_STATUS,
-                                const.ACTIVE,
-                                CONF.load_balancer.build_interval,
-                                CONF.load_balancer.build_timeout)
+    @decorators.idempotent_id('d8c428b0-dee4-4374-8286-31e52aeb7fe5')
+    def test_HTTP_LC_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTP,
+                             pool_protocol=const.HTTP, protocol_port=11,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
 
+    @decorators.idempotent_id('82d8e035-4068-4bad-a87b-e4907bf6d464')
+    def test_HTTPS_LC_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTPS, protocol_port=12,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('726beb03-de8c-43cd-ba5f-e7d6faf627a3')
+    def test_HTTPS_LC_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTPS,
+                             pool_protocol=const.HTTPS, protocol_port=13,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('b3cef24e-343a-4e77-833b-422158d54673')
+    def test_PROXY_LC_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.PROXY, protocol_port=14,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('f1edfb45-a9d3-4150-8bc9-4fc3427c6346')
+    def test_PROXY_LC_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.PROXY, protocol_port=15,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('d6d067c3-ec63-4b5d-a364-acc7493ae3b8')
+    def test_TCP_LC_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.TCP, protocol_port=16,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('a159c345-9463-4c01-b571-086c789bd7d5')
+    def test_TCP_LC_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.TCP, protocol_port=17,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('6fea6a39-19eb-4a0e-b507-82ecc57c1dc5')
+    def test_UDP_LC_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.UDP, protocol_port=18,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('6ce12d8c-ad59-4e48-8de1-d26926735457')
+    def test_UDP_LC_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.UDP,
+                             pool_protocol=const.UDP, protocol_port=19,
+                             algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    # Pool with Round Robin algorithm
     @decorators.idempotent_id('dfa120bf-81b9-4f22-bb5e-7df660c18173')
-    def test_pool_standalone_CRUD(self):
-        self._test_pool_CRUD(has_listener=False)
+    def test_HTTP_RR_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTP, protocol_port=20,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
 
     @decorators.idempotent_id('087da8ab-79c7-48ba-871c-5769185cea3e')
-    def test_pool_with_listener_CRUD(self):
-        self._test_pool_CRUD(has_listener=True)
+    def test_HTTP_RR_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTP,
+                             pool_protocol=const.HTTP, protocol_port=21,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
 
-    def _test_pool_CRUD(self, has_listener):
+    @decorators.idempotent_id('6179a5d1-6425-4144-a437-b0d260b7b883')
+    def test_HTTPS_RR_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTPS, protocol_port=22,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('1b4585b4-c521-48e8-a69a-8a1d729a2949')
+    def test_HTTPS_RR_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTPS,
+                             pool_protocol=const.HTTPS, protocol_port=23,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('6b9f4f01-cb78-409a-b9fe-cbbeb27d0c5f')
+    def test_PROXY_RR_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.PROXY, protocol_port=24,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('0228ea63-dff5-4dfb-b48a-193e8509caa8')
+    def test_PROXY_RR_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.PROXY, protocol_port=25,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('71088923-cfdf-4821-a6a8-c7c9045b624d')
+    def test_TCP_RR_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.TCP, protocol_port=26,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('4b663772-5c6b-49a3-b592-49d91bd71ff1')
+    def test_TCP_RR_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.TCP, protocol_port=27,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('45aefaa0-c909-4861-91c6-517ea10285a5')
+    def test_UDP_RR_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.UDP, protocol_port=28,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('cff21560-52be-439f-a41f-789d365db567')
+    def test_UDP_RR_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.UDP,
+                             pool_protocol=const.UDP, protocol_port=29,
+                             algorithm=const.LB_ALGORITHM_ROUND_ROBIN)
+
+    # Pool with Source IP algorithm
+    @decorators.idempotent_id('4ef47185-ef22-4396-8c9c-b98b9b476605')
+    def test_HTTP_SI_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTP, protocol_port=30,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('13a5caba-42a5-4b8c-a389-74d630a91687')
+    def test_HTTP_SI_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTP,
+                             pool_protocol=const.HTTP, protocol_port=31,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('5ff7732a-7481-4c03-8efc-5ee794feb11a')
+    def test_HTTPS_SI_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTPS, protocol_port=32,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('30f3d93c-cc22-4821-8805-d5c41023eccd')
+    def test_HTTPS_SI_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTPS,
+                             pool_protocol=const.HTTPS, protocol_port=33,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('7cbb01b8-196b-4ac3-9fec-a41abf867850')
+    def test_PROXY_SI_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.PROXY, protocol_port=34,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('29327103-4949-4a77-a748-87ab725237b7')
+    def test_PROXY_SI_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.PROXY, protocol_port=35,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('6a4dd425-d7d9-40dd-b451-feb4b3c551cc')
+    def test_TCP_SI_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.TCP, protocol_port=36,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('4391d6a5-bb1c-4ff0-9f74-7b8c43a0b150')
+    def test_TCP_SI_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.TCP, protocol_port=37,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('211a688c-f495-4f32-a297-c64d240b5de0')
+    def test_UDP_SI_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.UDP, protocol_port=38,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('b19f1285-dbf2-4ac9-9199-3c3693148133')
+    def test_UDP_SI_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.UDP,
+                             pool_protocol=const.UDP, protocol_port=39,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP)
+
+    # Pool with Source IP Port algorithm
+    @decorators.idempotent_id('fee61d34-e272-42f5-92e2-69b515c6cded')
+    def test_HTTP_SIP_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTP, protocol_port=40,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('d99948da-649d-493c-a74d-72e532df0605')
+    def test_HTTP_SIP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTP,
+                             pool_protocol=const.HTTP, protocol_port=41,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('b3c68f89-634e-4279-9546-9f2d2eac4bfa')
+    def test_HTTPS_SIP_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.HTTPS, protocol_port=42,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('4327f636-50c3-411c-b90e-0b907bdaffc5')
+    def test_HTTPS_SIP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.HTTPS,
+                             pool_protocol=const.HTTPS, protocol_port=43,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('95a93e91-6ac0-40d5-999c-84a8b68c14f4')
+    def test_PROXY_SIP_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.PROXY, protocol_port=44,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('13893ac9-150f-4605-be68-6bdf65e2bb12')
+    def test_PROXY_SIP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.PROXY, protocol_port=45,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('d045ea39-b6dd-4171-bb90-2b9970e25303')
+    def test_TCP_SIP_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.TCP, protocol_port=46,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('ec22ab54-8e0a-4472-8f70-78c34f28dc36')
+    def test_TCP_SIP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.TCP,
+                             pool_protocol=const.TCP, protocol_port=47,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('0e0f0299-8c5e-4d7c-a99e-85db43b45446')
+    def test_UDP_SIP_pool_standalone_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=None,
+                             pool_protocol=const.UDP, protocol_port=48,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('66d50010-13ca-4588-ae36-61bb783d556e')
+    def test_UDP_SIP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(listener_protocol=const.UDP,
+                             pool_protocol=const.UDP, protocol_port=49,
+                             algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    # Test with session persistence
+    @decorators.idempotent_id('d6b8119b-40e9-487d-a037-9972a1e688e8')
+    def test_HTTP_RR_app_cookie_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=50,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_APP_COOKIE)
+
+    @decorators.idempotent_id('a67f2276-6469-48d4-bf7e-ddf6d8694dba')
+    def test_HTTP_RR_http_cookie_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=51,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_HTTP_COOKIE)
+
+    @decorators.idempotent_id('c248e3d8-43d9-4fd4-93af-845747c9b939')
+    def test_HTTP_RR_source_IP_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(
+            listener_protocol=const.HTTP,
+            pool_protocol=const.HTTP, protocol_port=52,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    @decorators.idempotent_id('dc7f0ed5-f94c-4498-9dca-5dbc08e7162f')
+    def test_UDP_RR_source_ip_pool_with_listener_CRUD(self):
+        self._test_pool_CRUD(
+            listener_protocol=const.UDP,
+            pool_protocol=const.UDP, protocol_port=53,
+            algorithm=const.LB_ALGORITHM_ROUND_ROBIN,
+            session_persistence=const.SESSION_PERSISTENCE_SOURCE_IP)
+
+    def _test_pool_CRUD(self, listener_protocol, pool_protocol, protocol_port,
+                        algorithm, session_persistence=None):
         """Tests pool create, read, update, delete
 
         * Create a fully populated pool.
@@ -91,6 +339,35 @@
         * Update the pool.
         * Delete the pool.
         """
+        if (algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            self.mem_listener_client.is_version_supported(
+                self.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
+
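+        # When listener_protocol is None the pool is created "standalone",
+        # attached directly to the load balancer rather than to a listener.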
+        # Listener create
+        if listener_protocol is not None:
+            listener_name = data_utils.rand_name("lb_member_listener1_pool")
+            listener_kwargs = {
+                const.NAME: listener_name,
+                const.PROTOCOL: listener_protocol,
+                const.PROTOCOL_PORT: protocol_port,
+                const.LOADBALANCER_ID: self.lb_id,
+            }
+            listener = self.mem_listener_client.create_listener(
+                **listener_kwargs)
+            listener_id = listener[const.ID]
+            self.addClassResourceCleanup(
+                self.mem_listener_client.cleanup_listener, listener_id,
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
+            waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
+                                    self.lb_id, const.PROVISIONING_STATUS,
+                                    const.ACTIVE,
+                                    CONF.load_balancer.build_interval,
+                                    CONF.load_balancer.build_timeout)
+
         # Pool create
         pool_name = data_utils.rand_name("lb_member_pool1-CRUD")
         pool_description = data_utils.arbitrary_string(size=255)
@@ -99,20 +376,44 @@
             const.NAME: pool_name,
             const.DESCRIPTION: pool_description,
             const.ADMIN_STATE_UP: False,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: pool_protocol,
+            const.LB_ALGORITHM: algorithm,
         }
-        if self.lb_feature_enabled.session_persistence_enabled:
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_kwargs[const.SESSION_PERSISTENCE] = {
                 const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
-                const.COOKIE_NAME: pool_sp_cookie_name,
+                const.COOKIE_NAME: pool_sp_cookie_name
             }
-        if has_listener:
-            pool_kwargs[const.LISTENER_ID] = self.listener_id
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            pool_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
+            }
+
+        if listener_protocol is not None:
+            pool_kwargs[const.LISTENER_ID] = listener_id
         else:
             pool_kwargs[const.LOADBALANCER_ID] = self.lb_id
 
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case: the reference driver does not support
+        # SOURCE_IP_PORT. Since it runs with not_implemented_is_error, we
+        # must handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         self.addCleanup(
             self.mem_pool_client.cleanup_pool,
             pool[const.ID],
@@ -137,24 +438,31 @@
         parser.parse(pool[const.UPDATED_AT])
         UUID(pool[const.ID])
         self.assertEqual(const.OFFLINE, pool[const.OPERATING_STATUS])
-        self.assertEqual(self.protocol, pool[const.PROTOCOL])
+        self.assertEqual(pool_protocol, pool[const.PROTOCOL])
         self.assertEqual(1, len(pool[const.LOADBALANCERS]))
         self.assertEqual(self.lb_id, pool[const.LOADBALANCERS][0][const.ID])
-        if has_listener:
+        if listener_protocol is not None:
             self.assertEqual(1, len(pool[const.LISTENERS]))
-            self.assertEqual(self.listener_id,
-                             pool[const.LISTENERS][0][const.ID])
+            self.assertEqual(listener_id, pool[const.LISTENERS][0][const.ID])
         else:
             self.assertEmpty(pool[const.LISTENERS])
-        self.assertEqual(self.lb_algorithm,
-                         pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertEqual(pool_sp_cookie_name,
                              pool[const.SESSION_PERSISTENCE][
                                  const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
 
         # Pool update
         new_name = data_utils.rand_name("lb_member_pool1-update")
@@ -166,14 +474,26 @@
             const.ADMIN_STATE_UP: True,
         }
 
-        if self.lb_feature_enabled.pool_algorithms_enabled:
-            pool_update_kwargs[const.LB_ALGORITHM] = (
-                const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        # We have to keep the same algorithm on update, as not all drivers
+        # support more than one pool algorithm.
+        pool_update_kwargs[const.LB_ALGORITHM] = algorithm
 
-        if self.protocol == const.HTTP and (
-                self.lb_feature_enabled.session_persistence_enabled):
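+        # Swap between the two cookie-based persistence types on update to
+        # verify that the session persistence type can be changed in place.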
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             pool_update_kwargs[const.SESSION_PERSISTENCE] = {
-                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE}
+                const.TYPE: const.SESSION_PERSISTENCE_HTTP_COOKIE
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            pool_update_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_APP_COOKIE,
+                const.COOKIE_NAME: pool_sp_cookie_name
+            }
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            # Some protocols only support SOURCE_IP session persistence,
+            # so keep the same type on update.
+            pool_update_kwargs[const.SESSION_PERSISTENCE] = {
+                const.TYPE: const.SESSION_PERSISTENCE_SOURCE_IP
+            }
+
         pool = self.mem_pool_client.update_pool(
             pool[const.ID], **pool_update_kwargs)
 
@@ -192,15 +512,27 @@
         self.assertEqual(new_name, pool[const.NAME])
         self.assertEqual(new_description, pool[const.DESCRIPTION])
         self.assertTrue(pool[const.ADMIN_STATE_UP])
-        if self.lb_feature_enabled.pool_algorithms_enabled:
-            self.assertEqual(const.LB_ALGORITHM_LEAST_CONNECTIONS,
-                             pool[const.LB_ALGORITHM])
-        if self.lb_feature_enabled.session_persistence_enabled:
+        self.assertEqual(algorithm, pool[const.LB_ALGORITHM])
+
+        if session_persistence == const.SESSION_PERSISTENCE_APP_COOKIE:
             self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
             self.assertEqual(const.SESSION_PERSISTENCE_HTTP_COOKIE,
                              pool[const.SESSION_PERSISTENCE][const.TYPE])
             self.assertIsNone(
                 pool[const.SESSION_PERSISTENCE].get(const.COOKIE_NAME))
+        elif session_persistence == const.SESSION_PERSISTENCE_HTTP_COOKIE:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_APP_COOKIE,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+            self.assertEqual(pool_sp_cookie_name,
+                             pool[const.SESSION_PERSISTENCE][
+                                 const.COOKIE_NAME])
+        elif session_persistence == const.SESSION_PERSISTENCE_SOURCE_IP:
+            self.assertIsNotNone(pool.get(const.SESSION_PERSISTENCE))
+            self.assertEqual(const.SESSION_PERSISTENCE_SOURCE_IP,
+                             pool[const.SESSION_PERSISTENCE][const.TYPE])
+            self.assertIsNone(
+                pool[const.SESSION_PERSISTENCE].get(const.COOKIE_NAME))
 
         # Pool delete
         waiters.wait_for_status(
@@ -209,6 +541,7 @@
             const.ACTIVE,
             CONF.load_balancer.check_interval,
             CONF.load_balancer.check_timeout)
+
         self.mem_pool_client.delete_pool(pool[const.ID])
 
         waiters.wait_for_deleted_status_or_not_found(
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
index 7dd4a29..c8d917a 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
@@ -14,7 +14,6 @@
 
 import datetime
 import ipaddress
-import requests
 import shlex
 import testtools
 import time
@@ -24,22 +23,31 @@
 from tempest import config
 from tempest.lib.common.utils import data_utils
 from tempest.lib import decorators
+from tempest.lib import exceptions
 
 from octavia_tempest_plugin.common import constants as const
 from octavia_tempest_plugin.tests import test_base
-from octavia_tempest_plugin.tests import validators
 from octavia_tempest_plugin.tests import waiters
 
 CONF = config.CONF
 LOG = logging.getLogger(__name__)
 
 
-@testtools.skipUnless(
-    CONF.validation.run_validation,
-    'Traffic tests will not work without run_validation enabled.')
 class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
 
     @classmethod
+    def skip_checks(cls):
+        super().skip_checks()
+
+        if not CONF.validation.run_validation:
+            raise cls.skipException('Traffic tests will not work without '
+                                    'run_validation enabled.')
+
+        if CONF.load_balancer.test_with_noop:
+            raise cls.skipException('Traffic tests will not work in noop '
+                                    'mode.')
+
+    @classmethod
     def resource_setup(cls):
         """Setup resources needed by the tests."""
         super(TrafficOperationsScenarioTest, cls).resource_setup()
@@ -80,27 +88,19 @@
         else:
             cls.lb_vip_address = lb[const.VIP_ADDRESS]
 
-        # Per protocol listeners and pools IDs
-        cls.listener_ids = {}
-        cls.pool_ids = {}
-
-        cls.protocol = const.HTTP
-        lb_feature_enabled = CONF.loadbalancer_feature_enabled
-        if not lb_feature_enabled.l7_protocol_enabled:
-            cls.protocol = lb_feature_enabled.l4_protocol
-
-        # Don't use same ports for HTTP/l4_protocol and UDP because some
-        # releases (<=train) don't support it
-        cls._listener_pool_create(cls.protocol, 80)
-
-        cls._listener_pool_create(const.UDP, 8080)
-
     @classmethod
-    def _listener_pool_create(cls, protocol, protocol_port):
+    def _listener_pool_create(cls, protocol, protocol_port,
+                              pool_algorithm=const.LB_ALGORITHM_ROUND_ROBIN):
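+        """Create a listener and pool, returning (listener_id, pool_id)."""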
         if (protocol == const.UDP and
                 not cls.mem_listener_client.is_version_supported(
                     cls.api_version, '2.1')):
             return
+        if (pool_algorithm == const.LB_ALGORITHM_SOURCE_IP_PORT and not
+            cls.mem_listener_client.is_version_supported(
+                cls.api_version, '2.13')):
+            raise testtools.TestCase.skipException(
+                'Skipping this test as load balancing algorithm '
+                'SOURCE_IP_PORT requires API version 2.13 or newer.')
 
         listener_name = data_utils.rand_name("lb_member_listener1_operations")
         listener_kwargs = {
@@ -112,12 +112,10 @@
             # haproxy process and use haproxy>=1.8:
             const.CONNECTION_LIMIT: 200,
         }
-        listener = cls.mem_listener_client.create_listener(
-            **listener_kwargs)
-        cls.listener_ids[protocol] = listener[const.ID]
+        listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.addClassResourceCleanup(
             cls.mem_listener_client.cleanup_listener,
-            cls.listener_ids[protocol],
+            listener[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -130,14 +128,13 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: protocol,
-            const.LB_ALGORITHM: cls.lb_algorithm,
-            const.LISTENER_ID: cls.listener_ids[protocol],
+            const.LB_ALGORITHM: pool_algorithm,
+            const.LISTENER_ID: listener[const.ID],
         }
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
-        cls.pool_ids[protocol] = pool[const.ID]
         cls.addClassResourceCleanup(
             cls.mem_pool_client.cleanup_pool,
-            cls.pool_ids[protocol],
+            pool[const.ID],
             lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
 
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
@@ -146,7 +143,12 @@
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
 
-    def _test_basic_traffic(self, protocol, protocol_port):
+        return listener[const.ID], pool[const.ID]
+
+    def _test_basic_traffic(
+            self, protocol, protocol_port, listener_id, pool_id,
+            persistent=True, traffic_member_count=2, source_port=None,
+            delay=None):
         """Tests sending traffic through a loadbalancer
 
         * Set up members on a loadbalancer.
@@ -155,7 +157,7 @@
         # Set up Member 1 for Webserver 1
         member1_name = data_utils.rand_name("lb_member_member1-traffic")
         member1_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member1_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver1_ip,
@@ -164,11 +166,10 @@
         if self.lb_member_1_subnet:
             member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
 
-        member1 = self.mem_member_client.create_member(
-            **member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member1[const.ID], pool_id=self.pool_ids[protocol],
+            member1[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -179,7 +180,7 @@
         # Set up Member 2 for Webserver 2
         member2_name = data_utils.rand_name("lb_member_member2-traffic")
         member2_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member2_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver2_ip,
@@ -188,11 +189,10 @@
         if self.lb_member_2_subnet:
             member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
 
-        member2 = self.mem_member_client.create_member(
-            **member2_kwargs)
+        member2 = self.mem_member_client.create_member(**member2_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member2[const.ID], pool_id=self.pool_ids[protocol],
+            member2[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -201,16 +201,27 @@
             CONF.load_balancer.check_timeout)
 
         # Send some traffic
-        self.check_members_balanced(self.lb_vip_address,
-                                    protocol_port=protocol_port,
-                                    protocol=protocol)
+        self.check_members_balanced(
+            self.lb_vip_address, protocol_port=protocol_port,
+            persistent=persistent, protocol=protocol,
+            traffic_member_count=traffic_member_count, source_port=source_port,
+            delay=delay)
 
     @decorators.attr(type=['smoke', 'slow'])
     @testtools.skipIf(CONF.load_balancer.test_with_noop,
                       'Traffic tests will not work in noop mode.')
     @decorators.idempotent_id('6751135d-e15a-4e22-89f4-bfcc3408d424')
-    def test_basic_traffic(self):
-        self._test_basic_traffic(self.protocol, 80)
+    def test_basic_http_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.HTTP, 80)
+        self._test_basic_traffic(const.HTTP, 80, listener_id, pool_id)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('332a08e0-eff1-4c19-b46c-bf87148a6d84')
+    def test_basic_tcp_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.TCP, 81)
+        self._test_basic_traffic(const.TCP, 81, listener_id, pool_id,
+                                 persistent=False)
 
     @testtools.skipIf(CONF.load_balancer.test_with_noop,
                       'Traffic tests will not work in noop mode.')
@@ -220,10 +231,11 @@
                 self.api_version, '2.1'):
             raise self.skipException('UDP listener support is only available '
                                      'in Octavia API version 2.1 or newer')
+        listener_id, pool_id = self._listener_pool_create(const.UDP, 8080)
+        self._test_basic_traffic(const.UDP, 8080, listener_id, pool_id)
 
-        self._test_basic_traffic(const.UDP, 8080)
-
-    def _test_healthmonitor_traffic(self, protocol, protocol_port):
+    def _test_healthmonitor_traffic(self, protocol, protocol_port,
+                                    listener_id, pool_id, persistent=True):
         """Tests traffic is correctly routed based on healthmonitor status
 
         * Create three members:
@@ -242,7 +254,7 @@
 
         member1_name = data_utils.rand_name("lb_member_member1-hm-traffic")
         member1_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member1_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver1_ip,
@@ -251,12 +263,11 @@
         if self.lb_member_1_subnet:
             member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
 
-        member1 = self.mem_member_client.create_member(
-            **member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
         member1_id = member1[const.ID]
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member1_id, pool_id=self.pool_ids[protocol],
+            member1_id, pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -267,7 +278,7 @@
         # Set up Member 2 for Webserver 2
         member2_name = data_utils.rand_name("lb_member_member2-hm-traffic")
         member2_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member2_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver2_ip,
@@ -277,12 +288,11 @@
         if self.lb_member_2_subnet:
             member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
 
-        member2 = self.mem_member_client.create_member(
-            **member2_kwargs)
+        member2 = self.mem_member_client.create_member(**member2_kwargs)
         member2_id = member2[const.ID]
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member2_id, pool_id=self.pool_ids[protocol],
+            member2_id, pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -293,19 +303,18 @@
         # Set up Member 3 as a non-existent disabled node
         member3_name = data_utils.rand_name("lb_member_member3-hm-traffic")
         member3_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member3_name,
             const.ADMIN_STATE_UP: False,
             const.ADDRESS: '192.0.2.1',
             const.PROTOCOL_PORT: 80,
         }
 
-        member3 = self.mem_member_client.create_member(
-            **member3_kwargs)
+        member3 = self.mem_member_client.create_member(**member3_kwargs)
         member3_id = member3[const.ID]
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member3_id, pool_id=self.pool_ids[protocol],
+            member3_id, pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -320,27 +329,26 @@
             const.NO_MONITOR,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member2_id, const.OPERATING_STATUS,
             const.NO_MONITOR,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member3_id, const.OPERATING_STATUS,
             const.OFFLINE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
 
         # Send some traffic and verify it is balanced
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
-                                    protocol=protocol,
-                                    traffic_member_count=2)
+                                    protocol=protocol, persistent=persistent)
 
         # Create the healthmonitor
         hm_name = data_utils.rand_name("lb_member_hm1-hm-traffic")
@@ -351,7 +359,7 @@
                 hm_type = const.HEALTH_MONITOR_TCP
 
             hm_kwargs = {
-                const.POOL_ID: self.pool_ids[protocol],
+                const.POOL_ID: pool_id,
                 const.NAME: hm_name,
                 const.TYPE: hm_type,
                 const.DELAY: 3,
@@ -362,7 +370,7 @@
             }
         else:
             hm_kwargs = {
-                const.POOL_ID: self.pool_ids[protocol],
+                const.POOL_ID: pool_id,
                 const.NAME: hm_name,
                 const.TYPE: const.HEALTH_MONITOR_HTTP,
                 const.DELAY: 2,
@@ -400,27 +408,28 @@
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
             error_ok=True,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member2_id, const.OPERATING_STATUS,
             const.ERROR,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member3_id, const.OPERATING_STATUS,
             const.OFFLINE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
 
         # Send some traffic and verify it is *unbalanced*, as expected
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
                                     protocol=protocol,
-                                    traffic_member_count=1)
+                                    traffic_member_count=1,
+                                    persistent=persistent)
 
         # Delete the healthmonitor
         self.mem_healthmonitor_client.delete_healthmonitor(hm[const.ID])
@@ -438,37 +447,38 @@
             const.NO_MONITOR,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member2_id, const.OPERATING_STATUS,
             const.NO_MONITOR,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
         waiters.wait_for_status(
             self.mem_member_client.show_member,
             member3_id, const.OPERATING_STATUS,
             const.OFFLINE,
             CONF.load_balancer.build_interval,
             CONF.load_balancer.build_timeout,
-            pool_id=self.pool_ids[protocol])
+            pool_id=pool_id)
 
         # Send some traffic and verify it is balanced again
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
-                                    protocol=protocol)
+                                    protocol=protocol, persistent=persistent)
 
-    @testtools.skipUnless(
-        CONF.loadbalancer_feature_enabled.health_monitor_enabled,
-        'Health monitor testing is disabled')
     @decorators.idempotent_id('a16f8eb4-a77c-4b0e-8b1b-91c237039713')
-    def test_healthmonitor_traffic(self):
-        self._test_healthmonitor_traffic(self.protocol, 80)
+    def test_healthmonitor_http_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.HTTP, 82)
+        self._test_healthmonitor_traffic(const.HTTP, 82, listener_id, pool_id)
 
-    @testtools.skipUnless(
-        CONF.loadbalancer_feature_enabled.health_monitor_enabled,
-        'Health monitor testing is disabled')
+    @decorators.idempotent_id('22f00c34-343b-4aa9-90be-4567ecf85772')
+    def test_healthmonitor_tcp_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.TCP, 83)
+        self._test_healthmonitor_traffic(const.TCP, 83, listener_id, pool_id,
+                                         persistent=False)
+
     @decorators.idempotent_id('80b86513-1a76-4e42-91c9-cb23c879e536')
     def test_healthmonitor_udp_traffic(self):
         if not self.mem_listener_client.is_version_supported(
@@ -476,13 +486,11 @@
             raise self.skipException('UDP listener support is only available '
                                      'in Octavia API version 2.1 or newer')
 
-        self._test_healthmonitor_traffic(const.UDP, 8080)
+        listener_id, pool_id = self._listener_pool_create(const.UDP, 8081)
+        self._test_healthmonitor_traffic(const.UDP, 8081, listener_id, pool_id)
 
-    @testtools.skipUnless(
-        CONF.loadbalancer_feature_enabled.l7_protocol_enabled,
-        'L7 protocol testing is disabled')
     @decorators.idempotent_id('3558186d-6dcd-4d9d-b7f7-adc190b66149')
-    def test_l7policies_and_l7rules(self):
+    def test_http_l7policies_and_l7rules(self):
         """Tests sending traffic through a loadbalancer with l7rules
 
         * Create an extra pool.
@@ -492,6 +500,9 @@
         * Create a policy/rule to reject connections.
         * Test traffic to ensure it goes to the correct place.
         """
+        LISTENER_PORT = 84
+        listener_id, pool_id = self._listener_pool_create(const.HTTP,
+                                                          LISTENER_PORT)
         protocol = const.HTTP
 
         # Create a second pool
@@ -499,14 +510,14 @@
         pool_kwargs = {
             const.NAME: pool_name,
             const.PROTOCOL: protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.LB_ALGORITHM: const.LB_ALGORITHM_ROUND_ROBIN,
             const.LOADBALANCER_ID: self.lb_id,
         }
         pool = self.mem_pool_client.create_pool(**pool_kwargs)
-        pool_id = pool[const.ID]
+        pool2_id = pool[const.ID]
         self.addCleanup(
             self.mem_pool_client.cleanup_pool,
-            pool_id,
+            pool2_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
 
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
@@ -518,7 +529,7 @@
         # Set up Member 1 for Webserver 1 on the default pool
         member1_name = data_utils.rand_name("lb_member_member1-l7redirect")
         member1_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member1_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver1_ip,
@@ -531,7 +542,7 @@
             **member1_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member1[const.ID], pool_id=self.pool_ids[protocol],
+            member1[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -542,7 +553,7 @@
         # Set up Member 2 for Webserver 2 on the alternate pool
         member2_name = data_utils.rand_name("lb_member_member2-l7redirect")
         member2_kwargs = {
-            const.POOL_ID: pool_id,
+            const.POOL_ID: pool2_id,
             const.NAME: member2_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver2_ip,
@@ -555,7 +566,7 @@
             **member2_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member2[const.ID], pool_id=self.pool_ids[protocol],
+            member2[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -567,13 +578,13 @@
         l7policy1_name = data_utils.rand_name("lb_member_l7policy1-l7redirect")
         l7policy1_description = data_utils.arbitrary_string(size=255)
         l7policy1_kwargs = {
-            const.LISTENER_ID: self.listener_ids[protocol],
+            const.LISTENER_ID: listener_id,
             const.NAME: l7policy1_name,
             const.DESCRIPTION: l7policy1_description,
             const.ADMIN_STATE_UP: True,
             const.POSITION: 1,
             const.ACTION: const.REDIRECT_TO_POOL,
-            const.REDIRECT_POOL_ID: pool_id,
+            const.REDIRECT_POOL_ID: pool2_id,
         }
         l7policy1 = self.mem_l7policy_client.create_l7policy(
             **l7policy1_kwargs)
@@ -612,7 +623,7 @@
         l7policy2_name = data_utils.rand_name("lb_member_l7policy2-l7redirect")
         l7policy2_description = data_utils.arbitrary_string(size=255)
         l7policy2_kwargs = {
-            const.LISTENER_ID: self.listener_ids[protocol],
+            const.LISTENER_ID: listener_id,
             const.NAME: l7policy2_name,
             const.DESCRIPTION: l7policy2_description,
             const.ADMIN_STATE_UP: True,
@@ -657,7 +668,7 @@
         l7policy3_name = data_utils.rand_name("lb_member_l7policy3-l7redirect")
         l7policy3_description = data_utils.arbitrary_string(size=255)
         l7policy3_kwargs = {
-            const.LISTENER_ID: self.listener_ids[protocol],
+            const.LISTENER_ID: listener_id,
             const.NAME: l7policy3_name,
             const.DESCRIPTION: l7policy3_description,
             const.ADMIN_STATE_UP: True,
@@ -699,17 +710,20 @@
             CONF.load_balancer.build_timeout)
 
         # Assert that normal traffic goes to pool1->member1
-        url_for_member1 = 'http://{}/'.format(self.lb_vip_address)
+        url_for_member1 = 'http://{}:{}/'.format(self.lb_vip_address,
+                                                 LISTENER_PORT)
         self.assertConsistentResponse((200, self.webserver1_response),
                                       url_for_member1)
 
         # Assert that slow traffic goes to pool2->member2
-        url_for_member2 = 'http://{}/slow?delay=1s'.format(self.lb_vip_address)
+        url_for_member2 = 'http://{}:{}/slow?delay=1s'.format(
+            self.lb_vip_address, LISTENER_PORT)
         self.assertConsistentResponse((200, self.webserver2_response),
                                       url_for_member2)
 
         # Assert that /turtles is redirected to identity
-        url_for_identity = 'http://{}/turtles'.format(self.lb_vip_address)
+        url_for_identity = 'http://{}:{}/turtles'.format(self.lb_vip_address,
+                                                         LISTENER_PORT)
         self.assertConsistentResponse((302, CONF.identity.uri_v3),
                                       url_for_identity,
                                       redirect=True)
@@ -719,7 +733,9 @@
                                       url_for_member1,
                                       headers={'reject': 'true'})
 
-    def _test_mixed_ipv4_ipv6_members_traffic(self, protocol, protocol_port):
+    def _test_mixed_ipv4_ipv6_members_traffic(self, protocol, protocol_port,
+                                              listener_id, pool_id,
+                                              persistent=True):
         """Tests traffic through a loadbalancer with IPv4 and IPv6 members.
 
         * Set up members on a loadbalancer.
@@ -729,7 +745,7 @@
         # Set up Member 1 for Webserver 1
         member1_name = data_utils.rand_name("lb_member_member1-traffic")
         member1_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member1_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver1_ip,
@@ -742,7 +758,7 @@
             **member1_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member1[const.ID], pool_id=self.pool_ids[protocol],
+            member1[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -753,7 +769,7 @@
         # Set up Member 2 for Webserver 2
         member2_name = data_utils.rand_name("lb_member_member2-traffic")
         member2_kwargs = {
-            const.POOL_ID: self.pool_ids[protocol],
+            const.POOL_ID: pool_id,
             const.NAME: member2_name,
             const.ADMIN_STATE_UP: True,
             const.ADDRESS: self.webserver2_ipv6,
@@ -767,7 +783,7 @@
             **member2_kwargs)
         self.addCleanup(
             self.mem_member_client.cleanup_member,
-            member2[const.ID], pool_id=self.pool_ids[protocol],
+            member2[const.ID], pool_id=pool_id,
             lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
@@ -778,15 +794,28 @@
         # Send some traffic
         self.check_members_balanced(self.lb_vip_address,
                                     protocol_port=protocol_port,
-                                    protocol=protocol)
+                                    protocol=protocol, persistent=persistent)
 
     @testtools.skipIf(CONF.load_balancer.test_with_noop,
                       'Traffic tests will not work in noop mode.')
     @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
                           'Mixed IPv4/IPv6 member test requires IPv6.')
     @decorators.idempotent_id('20b6b671-0101-4bed-a249-9af6ee3aa6d9')
-    def test_mixed_ipv4_ipv6_members_traffic(self):
-        self._test_mixed_ipv4_ipv6_members_traffic(self.protocol, 80)
+    def test_mixed_ipv4_ipv6_members_http_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.HTTP, 85)
+        self._test_mixed_ipv4_ipv6_members_traffic(const.HTTP, 85,
+                                                   listener_id, pool_id)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @testtools.skipUnless(CONF.load_balancer.test_with_ipv6,
+                          'Mixed IPv4/IPv6 member test requires IPv6.')
+    @decorators.idempotent_id('c442ae84-0abc-4470-8c7e-14a07e92a6fa')
+    def test_mixed_ipv4_ipv6_members_tcp_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(const.TCP, 86)
+        self._test_mixed_ipv4_ipv6_members_traffic(const.TCP, 86,
+                                                   listener_id, pool_id,
+                                                   persistent=False)
 
     @testtools.skipIf(CONF.load_balancer.test_with_noop,
                       'Traffic tests will not work in noop mode.')
@@ -805,8 +834,143 @@
                 self.api_version, '2.1'):
             raise self.skipException('UDP listener support is only available '
                                      'in Octavia API version 2.1 or newer')
+        listener_id, pool_id = self._listener_pool_create(const.UDP, 8082)
+        self._test_mixed_ipv4_ipv6_members_traffic(const.UDP, 8082,
+                                                   listener_id, pool_id)
 
-        self._test_mixed_ipv4_ipv6_members_traffic(const.UDP, 8080)
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('a58063fb-b9e8-4cfc-8a8c-7b2e9e884e7a')
+    def test_least_connections_http_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(
+            const.HTTP, 87,
+            pool_algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_basic_traffic(const.HTTP, 87, listener_id, pool_id)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('e1056709-6a1a-4a15-80c2-5cbb8279f924')
+    def test_least_connections_tcp_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(
+            const.TCP, 88, pool_algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_basic_traffic(const.TCP, 88, listener_id, pool_id,
+                                 persistent=False, delay=0.2)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('b5285410-507c-4629-90d4-6161540033d9')
+    def test_least_connections_udp_traffic(self):
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        listener_id, pool_id = self._listener_pool_create(
+            const.UDP, 8083,
+            pool_algorithm=const.LB_ALGORITHM_LEAST_CONNECTIONS)
+        self._test_basic_traffic(const.UDP, 8083, listener_id, pool_id)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('881cc3e9-a011-4043-b0e3-a6185f736053')
+    def test_source_ip_http_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(
+            const.HTTP, 89,
+            pool_algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_basic_traffic(const.HTTP, 89, listener_id, pool_id,
+                                 traffic_member_count=1, persistent=False)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('4568db0e-4243-4191-a822-9d327a55fa64')
+    def test_source_ip_tcp_traffic(self):
+        listener_id, pool_id = self._listener_pool_create(
+            const.TCP, 90, pool_algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_basic_traffic(const.TCP, 90, listener_id, pool_id,
+                                 traffic_member_count=1, persistent=False)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('be9e6ef2-7840-47d7-9315-cdb1e897b202')
+    def test_source_ip_udp_traffic(self):
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        listener_id, pool_id = self._listener_pool_create(
+            const.UDP, 8084,
+            pool_algorithm=const.LB_ALGORITHM_SOURCE_IP)
+        self._test_basic_traffic(const.UDP, 8084, listener_id, pool_id,
+                                 traffic_member_count=1, persistent=False)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('a446585b-5651-40ce-a4db-cb2ab4d37c03')
+    def test_source_ip_port_http_traffic(self):
+        # This is a special case as the reference driver does not support
+        # this test. Since it runs with not_implemented_is_error, we must
+        # handle this test case specially.
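+        # With not_implemented_is_error set, the NotImplemented response
+        # would otherwise fail the test, so it is converted into a skip.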
+        try:
+            listener_id, pool_id = self._listener_pool_create(
+                const.HTTP, 60091,
+                pool_algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_basic_traffic(
+                const.HTTP, 60091, listener_id, pool_id,
+                traffic_member_count=1, persistent=False, source_port=60091)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('60108f30-d870-487c-ab96-8d8a9b587b94')
+    def test_source_ip_port_tcp_traffic(self):
+        # This is a special case as the reference driver does not support
+        # this test. Since it runs with not_implemented_is_error, we must
+        # handle this test case specially.
+        try:
+            listener_id, pool_id = self._listener_pool_create(
+                const.TCP, 60092,
+                pool_algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_basic_traffic(
+                const.TCP, 60092, listener_id, pool_id, traffic_member_count=1,
+                persistent=False, source_port=60092)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
+    @testtools.skipIf(CONF.load_balancer.test_with_noop,
+                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('a67dfa58-6953-4a0f-8a65-3f153b254c98')
+    def test_source_ip_port_udp_traffic(self):
+        if not self.mem_listener_client.is_version_supported(
+                self.api_version, '2.1'):
+            raise self.skipException('UDP listener support is only available '
+                                     'in Octavia API version 2.1 or newer')
+        # This is a special case as the reference driver does not support
+        # this test. Since it runs with not_implemented_is_error, we must
+        # handle this test case specially.
+        try:
+            listener_id, pool_id = self._listener_pool_create(
+                const.UDP, 8085,
+                pool_algorithm=const.LB_ALGORITHM_SOURCE_IP_PORT)
+            self._test_basic_traffic(
+                const.UDP, 8085, listener_id, pool_id, traffic_member_count=1,
+                persistent=False, source_port=8085)
+        except exceptions.NotImplemented as e:
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
 
     @testtools.skipIf(CONF.load_balancer.test_with_noop,
                       'Log offload tests will not work in noop mode.')
@@ -814,9 +978,6 @@
         CONF.loadbalancer_feature_enabled.log_offload_enabled,
         'Skipping log offload tests because tempest configuration '
         '[loadbalancer-feature-enabled] log_offload_enabled is False.')
-    @testtools.skipUnless(
-        CONF.loadbalancer_feature_enabled.l7_protocol_enabled,
-        'Log offload tests require l7_protocol_enabled.')
     @decorators.idempotent_id('571dddd9-f5bd-404e-a799-9df7ac9e2fa9')
     def test_tenant_flow_log(self):
         """Tests tenant flow log offloading
@@ -898,7 +1059,7 @@
         # Make the request
         URL = 'http://{0}:{1}/{2}'.format(
             self.lb_vip_address, protocol_port, unique_request_id)
-        validators.validate_URL_response(URL, expected_status_code=200)
+        self.validate_URL_response(URL, expected_status_code=200)
 
         # We need to give the log subsystem time to commit the log
         time.sleep(CONF.load_balancer.check_interval)
@@ -942,10 +1103,68 @@
         self.assertTrue(fields[14].isdigit())  # processing_time
         self.assertEqual('----', fields[15])  # term_state
 
-    @testtools.skipIf(CONF.load_balancer.test_with_noop,
-                      'Traffic tests will not work in noop mode.')
+    @decorators.idempotent_id('04399db0-04f0-4cb5-bb27-a12bf18bfe08')
+    def test_http_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 90, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
+    @decorators.idempotent_id('3d8d95b6-55e8-4bb9-b474-4ac35abaff22')
+    def test_tcp_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 91, const.LB_ALGORITHM_LEAST_CONNECTIONS, delay=0.2)
+
+    @decorators.idempotent_id('7456b558-9add-4e0e-988e-06803f8047f7')
+    def test_udp_LC_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 92, const.LB_ALGORITHM_LEAST_CONNECTIONS)
+
     @decorators.idempotent_id('13b0f2de-9934-457b-8be0-f1bffc6915a0')
-    def test_listener_with_allowed_cidrs(self):
+    def test_http_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 93, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('8bca1325-f894-494d-95c6-3ea4c3df6a0b')
+    def test_tcp_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 94, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('93675cc3-e765-464b-9563-e0848dc75330')
+    def test_udp_RR_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 95, const.LB_ALGORITHM_ROUND_ROBIN)
+
+    @decorators.idempotent_id('fb5f35c1-08c9-43f7-8ed1-0395a3ef4735')
+    def test_http_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 96, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('c0904c88-2479-42e2-974f-55041f30e6c5')
+    def test_tcp_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 97, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('4f73bac5-2c98-45f9-8976-724c99e39979')
+    def test_udp_SI_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 98, const.LB_ALGORITHM_SOURCE_IP)
+
+    @decorators.idempotent_id('d198ddc5-1bcb-4310-a1b0-fa1a6328c4e9')
+    def test_http_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.HTTP, 99, const.LB_ALGORITHM_SOURCE_IP_PORT)
+
+    @decorators.idempotent_id('bbb09dbb-2aad-4281-9383-4bb4ad420ee1')
+    def test_tcp_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.TCP, 100, const.LB_ALGORITHM_SOURCE_IP_PORT, delay=0.2)
+
+    @decorators.idempotent_id('70290a9d-0065-42ad-bb46-884a535d2da2')
+    def test_udp_SIP_listener_with_allowed_cidrs(self):
+        self._test_listener_with_allowed_cidrs(
+            const.UDP, 101, const.LB_ALGORITHM_SOURCE_IP_PORT, delay=0.2)
+
+    def _test_listener_with_allowed_cidrs(self, protocol, protocol_port,
+                                          algorithm, delay=None):
         """Tests traffic through a loadbalancer with allowed CIDRs set.
 
         * Set up listener with allowed CIDRS (allow all) on a loadbalancer.
@@ -963,11 +1182,10 @@
                                      'or newer.')
 
         listener_name = data_utils.rand_name("lb_member_listener2_cidrs")
-        listener_port = 8080
         listener_kwargs = {
             const.NAME: listener_name,
-            const.PROTOCOL: self.protocol,
-            const.PROTOCOL_PORT: listener_port,
+            const.PROTOCOL: protocol,
+            const.PROTOCOL_PORT: protocol_port,
             const.LOADBALANCER_ID: self.lb_id,
             const.ALLOWED_CIDRS: ['0.0.0.0/0']
         }
@@ -987,11 +1205,25 @@
         pool_name = data_utils.rand_name("lb_member_pool3_cidrs")
         pool_kwargs = {
             const.NAME: pool_name,
-            const.PROTOCOL: self.protocol,
-            const.LB_ALGORITHM: self.lb_algorithm,
+            const.PROTOCOL: protocol,
+            const.LB_ALGORITHM: algorithm,
             const.LISTENER_ID: listener_id,
         }
-        pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        # This is a special case as the reference driver does not support
+        # SOURCE-IP-PORT. Since it runs with not_implemented_is_error, we must
+        # handle this test case specially.
+        try:
+            pool = self.mem_pool_client.create_pool(**pool_kwargs)
+        except exceptions.NotImplemented as e:
+            if algorithm != const.LB_ALGORITHM_SOURCE_IP_PORT:
+                raise
+            message = ("The configured provider driver '{driver}' "
+                       "does not support a feature required for this "
+                       "test.".format(driver=CONF.load_balancer.provider))
+            if hasattr(e, 'resp_body'):
+                message = e.resp_body.get('faultstring', message)
+            raise testtools.TestCase.skipException(message)
+
         pool_id = pool[const.ID]
         self.addCleanup(
             self.mem_pool_client.cleanup_pool,
@@ -1052,8 +1284,13 @@
             CONF.load_balancer.check_timeout)
 
         # Send some traffic
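+        # The SOURCE_IP algorithm hashes on the client address, so a single
+        # test client is only ever balanced to one member.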
+        members = 2
+        if algorithm == const.LB_ALGORITHM_SOURCE_IP:
+            members = 1
         self.check_members_balanced(
-            self.lb_vip_address, protocol_port=listener_port)
+            self.lb_vip_address, protocol=protocol,
+            protocol_port=protocol_port, persistent=False,
+            traffic_member_count=members, delay=delay)
 
         listener_kwargs = {
             const.LISTENER_ID: listener_id,
@@ -1066,21 +1303,27 @@
                                 CONF.load_balancer.build_interval,
                                 CONF.load_balancer.build_timeout)
 
-        url_for_vip = 'http://{}:{}/'.format(
-            self.lb_vip_address, listener_port)
-
         # NOTE: Before we start with the consistent response check, we must
         # wait until Neutron completes the SG update.
         # See https://bugs.launchpad.net/neutron/+bug/1866353.
-        def expect_conn_error(url):
+        def expect_timeout_error(address, protocol, protocol_port):
             try:
-                requests.Session().get(url)
-            except requests.exceptions.ConnectionError:
+                self.make_request(address, protocol=protocol,
+                                  protocol_port=protocol_port)
+            except exceptions.TimeoutException:
                 return True
             return False
 
-        waiters.wait_until_true(expect_conn_error, url=url_for_vip)
+        waiters.wait_until_true(
+            expect_timeout_error, address=self.lb_vip_address,
+            protocol=protocol, protocol_port=protocol_port)
 
         # Assert that the server is consistently unavailable
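+        # NOTE: 'udp://' is not a scheme the requests library accepts; it
+        # is assumed assertConsistentResponse routes it through the
+        # plugin's UDP request helper.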
+        if protocol == const.UDP:
+            url_for_vip = 'udp://{}:{}/'.format(self.lb_vip_address,
+                                                protocol_port)
+        else:
+            url_for_vip = 'http://{}:{}/'.format(self.lb_vip_address,
+                                                 protocol_port)
         self.assertConsistentResponse(
-            (None, None), url_for_vip, repeat=3, conn_error=True)
+            (None, None), url_for_vip, repeat=3, expect_connection_error=True)
diff --git a/octavia_tempest_plugin/tests/test_base.py b/octavia_tempest_plugin/tests/test_base.py
index bd1a225..f260e88 100644
--- a/octavia_tempest_plugin/tests/test_base.py
+++ b/octavia_tempest_plugin/tests/test_base.py
@@ -12,18 +12,13 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
-import errno
 import ipaddress
 import pkg_resources
 import random
-import requests
 import shlex
-import six
-import socket
 import string
 import subprocess
 import tempfile
-import time
 
 from oslo_log import log as logging
 from oslo_utils import uuidutils
@@ -47,11 +42,8 @@
 RETRY_BACKOFF = 1
 RETRY_MAX = 5
 
-SRC_PORT_NUMBER_MIN = 32768
-SRC_PORT_NUMBER_MAX = 61000
 
-
-class LoadBalancerBaseTest(test.BaseTestCase):
+class LoadBalancerBaseTest(validators.ValidatorsMixin, test.BaseTestCase):
     """Base class for load balancer tests."""
 
     # Setup cls.os_roles_lb_member. cls.os_primary, cls.os_roles_lb_member,
@@ -66,6 +58,8 @@
     webserver2_response = 5
     used_ips = []
 
+    SRC_PORT_NUMBER_MIN = 32768
+    SRC_PORT_NUMBER_MAX = 61000
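+    # Source ports are walked sequentially through this range so that a
+    # source port is not reused, which would skew round-robin dispatch.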
     src_port_number = SRC_PORT_NUMBER_MIN
 
     @classmethod
@@ -160,6 +154,7 @@
                 "Configuration value test_network_override must be "
                 "specified if test_subnet_override is used.")
 
+        # TODO(johnsom) Remove this
         # Get loadbalancing algorithms supported by provider driver.
         try:
             algorithms = const.SUPPORTED_LB_ALGORITHMS[
@@ -914,231 +909,20 @@
     @classmethod
     def _validate_webserver(cls, ip_address, start_id):
         URL = 'http://{0}'.format(ip_address)
-        validators.validate_URL_response(URL, expected_body=str(start_id))
+        cls.validate_URL_response(URL, expected_body=str(start_id))
         URL = 'http://{0}:81'.format(ip_address)
-        validators.validate_URL_response(URL, expected_body=str(start_id + 1))
+        cls.validate_URL_response(URL, expected_body=str(start_id + 1))
 
     @classmethod
     def _validate_udp_server(cls, ip_address, start_id):
-        res = cls._udp_request(ip_address, 80)
+        res = cls.make_udp_request(ip_address, 80)
         if res != str(start_id):
             raise Exception("Response from test server doesn't match the "
                             "expected value ({0} != {1}).".format(
                                 res, str(start_id)))
 
-        res = cls._udp_request(ip_address, 81)
+        res = cls.make_udp_request(ip_address, 81)
         if res != str(start_id + 1):
             raise Exception("Response from test server doesn't match the "
                             "expected value ({0} != {1}).".format(
                                 res, str(start_id + 1)))
-
-    @classmethod
-    def _udp_request(cls, vip_address, port=80, timeout=None):
-        if ipaddress.ip_address(vip_address).version == 6:
-            family = socket.AF_INET6
-        else:
-            family = socket.AF_INET
-
-        sock = socket.socket(family, socket.SOCK_DGRAM)
-
-        # Force the use of an incremental port number for source to avoid
-        # re-use of a previous source port that will affect the round-robin
-        # dispatch
-        while True:
-            port_number = cls.src_port_number
-            cls.src_port_number += 1
-            if cls.src_port_number >= SRC_PORT_NUMBER_MAX:
-                cls.src_port_number = SRC_PORT_NUMBER_MIN
-
-            # catch and skip already used ports on the host
-            try:
-                sock.bind(('', port_number))
-            except OSError as e:
-                # if error is 'Address already in use', try next port number
-                if e.errno != errno.EADDRINUSE:
-                    raise e
-            else:
-                # successfully bind the socket
-                break
-
-        server_address = (vip_address, port)
-        data = b"data\n"
-
-        if timeout is not None:
-            sock.settimeout(timeout)
-
-        sock.sendto(data, server_address)
-        data, addr = sock.recvfrom(4096)
-
-        sock.close()
-
-        return data.decode('utf-8')
-
-    def _wait_for_lb_functional(self, vip_address, traffic_member_count,
-                                protocol_port, protocol, verify):
-        if protocol != const.UDP:
-            session = requests.Session()
-        start = time.time()
-
-        response_counts = {}
-
-        # Send requests to the load balancer until at least
-        # "traffic_member_count" members have replied (ensure network
-        # connectivity is functional between the load balancer and the members)
-        while time.time() - start < CONF.load_balancer.build_timeout:
-            try:
-                if protocol != const.UDP:
-                    url = "{0}://{1}{2}{3}".format(
-                        protocol.lower(),
-                        vip_address,
-                        ':' if protocol_port else '',
-                        protocol_port or '')
-                    r = session.get(url, timeout=2, verify=verify)
-                    data = r.content
-                else:
-                    data = self._udp_request(vip_address, port=protocol_port,
-                                             timeout=2)
-                if data in response_counts:
-                    response_counts[data] += 1
-                else:
-                    response_counts[data] = 1
-
-                if traffic_member_count == len(response_counts):
-                    LOG.debug('Loadbalancer response totals: %s',
-                              response_counts)
-                    time.sleep(1)
-                    return
-            except Exception:
-                LOG.warning('Server is not passing initial traffic. Waiting.')
-                time.sleep(1)
-
-        LOG.debug('Loadbalancer response totals: %s', response_counts)
-        LOG.error('Server did not begin passing traffic within the timeout '
-                  'period. Failing test.')
-        raise Exception()
-
-    def _send_lb_request(self, handler, protocol, vip_address,
-                         verify, protocol_port, num=20):
-        response_counts = {}
-
-        # Send a number requests to lb vip
-        for i in range(num):
-            try:
-                if protocol != const.UDP:
-                    url = "{0}://{1}{2}{3}".format(
-                        protocol.lower(),
-                        vip_address,
-                        ':' if protocol_port else '',
-                        protocol_port or '')
-                    r = handler.get(url, timeout=2, verify=verify)
-                    data = r.content
-                else:
-                    data = self._udp_request(vip_address, port=protocol_port,
-                                             timeout=2)
-
-                if data in response_counts:
-                    response_counts[data] += 1
-                else:
-                    response_counts[data] = 1
-
-            except Exception:
-                LOG.exception('Failed to send request to loadbalancer vip')
-                raise Exception('Failed to connect to lb')
-        LOG.debug('Loadbalancer response totals: %s', response_counts)
-        return response_counts
-
-    def _check_members_balanced_round_robin(
-            self, vip_address, traffic_member_count=2, protocol=const.HTTP,
-            verify=True, protocol_port=80):
-
-        handler = requests.Session()
-        response_counts = self._send_lb_request(
-            handler, protocol, vip_address,
-            verify, protocol_port)
-
-        # Ensure the correct number of members
-        self.assertEqual(traffic_member_count, len(response_counts))
-
-        # Ensure both members got the same number of responses
-        self.assertEqual(1, len(set(response_counts.values())))
-
-    def _check_members_balanced_source_ip_port(
-            self, vip_address, traffic_member_count=2, protocol=const.HTTP,
-            verify=True, protocol_port=80):
-
-        handler = requests
-        response_counts = self._send_lb_request(
-            handler, protocol, vip_address,
-            verify, protocol_port)
-        # Ensure the correct number of members
-        self.assertEqual(traffic_member_count, len(response_counts))
-
-        if CONF.load_balancer.test_reuse_connection:
-            handler = requests.Session()
-            response_counts = self._send_lb_request(
-                handler, protocol, vip_address,
-                verify, protocol_port)
-            # Ensure only one member answered
-            self.assertEqual(1, len(response_counts))
-
-    def check_members_balanced(self, vip_address, traffic_member_count=2,
-                               protocol=const.HTTP, verify=True,
-                               protocol_port=80):
-
-        if (ipaddress.ip_address(vip_address).version == 6 and
-                protocol != const.UDP):
-            vip_address = '[{}]'.format(vip_address)
-        self._wait_for_lb_functional(vip_address, traffic_member_count,
-                                     protocol_port, protocol, verify)
-
-        validate_func = '_check_members_balanced_%s' % self.lb_algorithm
-        validate_func = getattr(self, validate_func.lower())
-        validate_func(
-            vip_address=vip_address,
-            traffic_member_count=traffic_member_count,
-            protocol=protocol,
-            verify=verify,
-            protocol_port=protocol_port)
-
-    def assertConsistentResponse(self, response, url, method='GET', repeat=10,
-                                 redirect=False, timeout=2,
-                                 conn_error=False, **kwargs):
-        """Assert that a request to URL gets the expected response.
-
-        :param response: Expected response in format (status_code, content).
-        :param url: The URL to request.
-        :param method: The HTTP method to use (GET, POST, PUT, etc)
-        :param repeat: How many times to test the response.
-        :param data: Optional data to send in the request.
-        :param headers: Optional headers to send in the request.
-        :param cookies: Optional cookies to send in the request.
-        :param redirect: Is the request a redirect? If true, assume the passed
-                         content should be the next URL in the chain.
-        :param timeout: Optional seconds to wait for the server to send data.
-        :param conn_error: Optional Expect a connection error?
-
-        :return: boolean success status
-
-        :raises: testtools.matchers.MismatchError
-        """
-        session = requests.Session()
-        response_code, response_content = response
-
-        for i in range(0, repeat):
-            if conn_error:
-                self.assertRaises(
-                    requests.exceptions.ConnectionError, session.request,
-                    method, url, allow_redirects=not redirect, timeout=timeout,
-                    **kwargs)
-                continue
-
-            req = session.request(method, url, allow_redirects=not redirect,
-                                  timeout=timeout, **kwargs)
-            if response_code:
-                self.assertEqual(response_code, req.status_code)
-            if redirect:
-                self.assertTrue(req.is_redirect)
-                self.assertEqual(response_content,
-                                 session.get_redirect_target(req))
-            elif response_content:
-                self.assertEqual(six.text_type(response_content), req.text)
diff --git a/octavia_tempest_plugin/tests/validators.py b/octavia_tempest_plugin/tests/validators.py
index 773fcc4..a93e2eb 100644
--- a/octavia_tempest_plugin/tests/validators.py
+++ b/octavia_tempest_plugin/tests/validators.py
@@ -1,6 +1,7 @@
 # Copyright 2017 GoDaddy
 # Copyright 2017 Catalyst IT Ltd
 # Copyright 2018 Rackspace US Inc.  All rights reserved.
+# Copyright 2020 Red Hat, Inc. All rights reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    not use this file except in compliance with the License. You may obtain
@@ -13,44 +14,68 @@
 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 #    License for the specific language governing permissions and limitations
 #    under the License.
-
+import errno
+import ipaddress
 import requests
+import socket
 import time
+from urllib.parse import urlparse
 
 from oslo_log import log as logging
 from tempest import config
 from tempest.lib import exceptions
+from tempest import test
+
+from octavia_tempest_plugin.common import constants as const
+from octavia_tempest_plugin.common import requests_adapters
 
 CONF = config.CONF
 LOG = logging.getLogger(__name__)
 
 
-def validate_URL_response(URL, expected_status_code=200,
-                          expected_body=None, HTTPS_verify=True,
-                          client_cert_path=None, CA_certs_path=None,
-                          request_interval=CONF.load_balancer.build_interval,
-                          request_timeout=CONF.load_balancer.build_timeout):
-    """Check a URL response (HTTP or HTTPS).
+class ValidatorsMixin(test.BaseTestCase):
 
-    :param URL: The URL to query.
-    :param expected_status_code: The expected HTTP status code.
-    :param expected_body: The expected response text, None will not compare.
-    :param HTTPS_verify: Should we verify the HTTPS server.
-    :param client_cert_path: Filesystem path to a file with the client private
-                             key and certificate.
-    :param CA_certs_path: Filesystem path to a file containing CA certificates
-                          to use for HTTPS validation.
-    :param request_interval: Time, in seconds, to timeout a request.
-    :param request_timeout: The maximum time, in seconds, to attempt requests.
-                            Failed validation of expected results does not
-                            result in a retry.
-    :raises InvalidHttpSuccessCode: The expected_status_code did not match.
-    :raises InvalidHTTPResponseBody: The response body did not match the
-                                     expected content.
-    :raises TimeoutException: The request timed out.
-    :returns: None
-    """
-    with requests.Session() as session:
+    @staticmethod
+    def validate_URL_response(
+            URL, expected_status_code=200, requests_session=None,
+            expected_body=None, HTTPS_verify=True, client_cert_path=None,
+            CA_certs_path=None, source_port=None,
+            request_interval=CONF.load_balancer.build_interval,
+            request_timeout=CONF.load_balancer.build_timeout):
+        """Check a URL response (HTTP or HTTPS).
+
+        :param URL: The URL to query.
+        :param expected_status_code: The expected HTTP status code.
+        :param requests_session: A requests session to use for the request.
+                                 If None, a new session will be created.
+        :param expected_body: The expected response text, None will not
+                              compare.
+        :param HTTPS_verify: Should we verify the HTTPS server.
+        :param client_cert_path: Filesystem path to a file with the client
+                                 private key and certificate.
+        :param CA_certs_path: Filesystem path to a file containing CA
+                              certificates to use for HTTPS validation.
+        :param source_port: If set, the request will come from this source port
+                            number. If None, a random port will be used.
+        :param request_interval: Time, in seconds, to timeout a request.
+        :param request_timeout: The maximum time, in seconds, to attempt
+                                requests.  Failed validation of expected
+                                results does not result in a retry.
+        :raises InvalidHttpSuccessCode: The expected_status_code did not match.
+        :raises InvalidHTTPResponseBody: The response body did not match the
+                                         expected content.
+        :raises TimeoutException: The request timed out.
+        :returns: The response data.
+        """
+        session = requests_session
+        if requests_session is None:
+            session = requests.Session()
+        if source_port:
+            session.mount('http://',
+                          requests_adapters.SourcePortAdapter(source_port))
+            session.mount('https://',
+                          requests_adapters.SourcePortAdapter(source_port))
+
         session_kwargs = {}
         if not HTTPS_verify:
             session_kwargs['verify'] = False
@@ -63,25 +88,333 @@
         while time.time() - start < request_timeout:
             try:
                 response = session.get(URL, **session_kwargs)
-                if response.status_code != expected_status_code:
+                response_status_code = response.status_code
+                response_text = response.text
+                response.close()
+                if response_status_code != expected_status_code:
                     raise exceptions.InvalidHttpSuccessCode(
                         '{0} is not the expected code {1}'.format(
-                            response.status_code, expected_status_code))
-                if expected_body and response.text != expected_body:
+                            response_status_code, expected_status_code))
+                if expected_body and response_text != expected_body:
                     details = '{} does not match expected {}'.format(
-                        response.text, expected_body)
+                        response_text, expected_body)
                     raise exceptions.InvalidHTTPResponseBody(
                         resp_body=details)
-                return
+                if requests_session is None:
+                    session.close()
+                return response_text
             except requests.exceptions.Timeout:
                 # Don't sleep as we have already waited the interval.
                 LOG.info('Request for {} timed out. Retrying.'.format(URL))
             except (exceptions.InvalidHttpSuccessCode,
                     exceptions.InvalidHTTPResponseBody,
                     requests.exceptions.SSLError):
+                if requests_session is None:
+                    session.close()
                 raise
             except Exception as e:
                 LOG.info('Validate URL got exception: {0}. '
                          'Retrying.'.format(e))
                 time.sleep(request_interval)
+        if requests_session is None:
+            session.close()
         raise exceptions.TimeoutException()
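A minimal usage sketch of the reworked helper (the VIP address, expected body, and port below are illustrative, and it assumes the tempest configuration is already initialized): a caller-supplied session is reused across polls, and source_port pins the client-side port via the SourcePortAdapter mounts above.

    import requests

    from octavia_tempest_plugin.tests import validators

    session = requests.Session()
    # Poll http://203.0.113.10/ until it returns 200 with body '1',
    # always connecting from local port 20000.
    body = validators.ValidatorsMixin.validate_URL_response(
        'http://203.0.113.10/', expected_status_code=200,
        expected_body='1', requests_session=session, source_port=20000)
    # The helper leaves caller-supplied sessions open, so close it here.
    session.close()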
+
+    @classmethod
+    def make_udp_request(cls, vip_address, port=80, timeout=None,
+                         source_port=None):
+        if ipaddress.ip_address(vip_address).version == 6:
+            family = socket.AF_INET6
+        else:
+            family = socket.AF_INET
+
+        sock = socket.socket(family, socket.SOCK_DGRAM)
+
+        # Force the use of an incremental source port number to avoid
+        # reusing a previous source port, which would skew the round-robin
+        # dispatch
+        while True:
+            port_number = cls.src_port_number
+            cls.src_port_number += 1
+            if cls.src_port_number >= cls.SRC_PORT_NUMBER_MAX:
+                cls.src_port_number = cls.SRC_PORT_NUMBER_MIN
+
+            # catch and skip already used ports on the host
+            try:
+                if source_port:
+                    sock.bind(('', source_port))
+                else:
+                    sock.bind(('', port_number))
+            except OSError as e:
+                # if error is 'Address already in use', try next port number
+                # If source_port is defined and already in use, a test
+                # developer has made a mistake by using a duplicate source
+                # port.
+                if e.errno != errno.EADDRINUSE or source_port:
+                    raise e
+            else:
+                # successfully bind the socket
+                break
+
+        server_address = (vip_address, port)
+        data = b"data\n"
+
+        if timeout is not None:
+            sock.settimeout(timeout)
+
+        try:
+            sock.sendto(data, server_address)
+            data, addr = sock.recvfrom(4096)
+        except socket.timeout:
+            # Normalize the timeout exception so that UDP and other protocol
+            # tests all return a common timeout exception.
+            raise exceptions.TimeoutException()
+        finally:
+            sock.close()
+
+        return data.decode('utf-8')
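A sketch of how a test might drive the UDP helper (the VIP and ports are placeholders, and it assumes `self` is a test class mixing in ValidatorsMixin, with the src_port_number/SRC_PORT_NUMBER_* counters defined on the concrete test base class elsewhere in the plugin). By default each call binds the next source port, so the load balancer sees a new flow per datagram; an explicit source_port keeps all datagrams on one flow.

    # Round-robin check: successive calls should reach different members.
    reply_1 = self.make_udp_request('203.0.113.10', port=8080, timeout=2)
    reply_2 = self.make_udp_request('203.0.113.10', port=8080, timeout=2)

    # Flow-affinity check: a pinned source port should keep hitting the
    # same member on a SOURCE_IP_PORT pool.
    reply_3 = self.make_udp_request('203.0.113.10', port=8080, timeout=2,
                                    source_port=20001)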
+
+    def make_request(
+            self, vip_address, protocol=const.HTTP, HTTPS_verify=True,
+            protocol_port=80, requests_session=None, client_cert_path=None,
+            CA_certs_path=None, request_timeout=2, source_port=None):
+        """Make a request to a VIP.
+
+        :param vip_address: The VIP address to test.
+        :param protocol: The protocol to use for the test.
+        :param HTTPS_verify: How to verify the TLS certificate. True: verify
+                             using the system CA certificates. False: Do not
+                             verify the VIP certificate. <path>: Filesystem
+                             to a CA certificate bundle file or directory. For
+                             directories, the directory must be processed using
+                             the c_rehash utility from openssl.
+        :param protocol_port: The port number to use for the test.
+        :param requests_session: A requests session to use for the request.
+                                 If None, a new session will be created.
+        :param request_timeout: The maximum time, in seconds, to attempt
+                                requests.
+        :param client_cert_path: Filesystem path to a file with the client
+                                 private key and certificate.
+        :param CA_certs_path: Filesystem path to a file containing CA
+                              certificates to use for HTTPS validation.
+        :param source_port: If set, the request will come from this source port
+                            number. If None, a random port will be used.
+        :raises InvalidHttpSuccessCode: The expected_status_code did not match.
+        :raises InvalidHTTPResponseBody: The response body did not match the
+                                         expected content.
+        :raises TimeoutException: The request timed out.
+        :raises Exception: If a protocol is requested that is not implemented.
+        :returns: The response data.
+        """
+        # Note: We are using HTTP as the TCP protocol check to simplify
+        #       the test setup. HTTP is a TCP-based protocol.
+        if protocol == const.HTTP or protocol == const.TCP:
+            url = "http://{0}{1}{2}".format(
+                vip_address, ':' if protocol_port else '',
+                protocol_port or '')
+            data = self.validate_URL_response(
+                url, HTTPS_verify=False, requests_session=requests_session,
+                request_timeout=request_timeout,
+                source_port=source_port)
+        elif (protocol == const.HTTPS or
+              protocol == const.TERMINATED_HTTPS):
+            url = "https://{0}{1}{2}".format(
+                vip_address, ':' if protocol_port else '',
+                protocol_port or '')
+            data = self.validate_URL_response(
+                url, HTTPS_verify=HTTPS_verify,
+                requests_session=requests_session,
+                client_cert_path=client_cert_path,
+                CA_certs_path=CA_certs_path, source_port=source_port,
+                request_timeout=request_timeout)
+        elif protocol == const.UDP:
+            data = self.make_udp_request(
+                vip_address, port=protocol_port, timeout=request_timeout,
+                source_port=source_port)
+        else:
+            message = ("Unknown protocol %s. Unable to check if the "
+                       "load balancer is balanced.", protocol)
+            LOG.error(message)
+            raise Exception(message)
+        return data
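How callers select a protocol through this single entry point, sketched with placeholder addresses and a placeholder CA bundle path:

    # HTTP and TCP listeners are both checked over plain HTTP:
    data = self.make_request('203.0.113.10', protocol=const.TCP,
                             protocol_port=81)
    # TLS listeners route through validate_URL_response with cert options:
    data = self.make_request('203.0.113.10',
                             protocol=const.TERMINATED_HTTPS,
                             HTTPS_verify='/tmp/ca-bundle.pem',  # placeholder
                             protocol_port=443)
    # UDP listeners are sent through the raw socket helper:
    data = self.make_request('203.0.113.10', protocol=const.UDP,
                             protocol_port=8080)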
+
+    def check_members_balanced(
+            self, vip_address, traffic_member_count=2, protocol=const.HTTP,
+            HTTPS_verify=True, protocol_port=80, persistent=True, repeat=20,
+            client_cert_path=None, CA_certs_path=None, request_interval=2,
+            request_timeout=10, source_port=None, delay=None):
+        """Checks that members are evenly balanced behind a VIP.
+
+        :param vip_address: The VIP address to test.
+        :param traffic_member_count: The expected number of members.
+        :param protocol: The protocol to use for the test.
+        :param HTTPS_verify: How to verify the TLS certificate. True: verify
+                             using the system CA certificates. False: Do not
+                             verify the VIP certificate. <path>: Filesystem
+                             to a CA certificate bundle file or directory. For
+                             directories, the directory must be processed using
+                             the c_rehash utility from openssl.
+        :param protocol_port: The port number to use for the test.
+        :param persistent: True when the test should persist cookies and use
+                           the protocol keepalive mechanism with the target.
+                           This may include maintaining a connection to the
+                           member server across requests.
+        :param repeat: The number of requests to make against the VIP.
+        :param request_timeout: The maximum time, in seconds, to attempt
+                                requests.
+        :param client_cert_path: Filesystem path to a file with the client
+                                 private key and certificate.
+        :param CA_certs_path: Filesystem path to a file containing CA
+                              certificates to use for HTTPS validation.
+        :param source_port: If set, the request will come from this source port
+                            number. If None, a random port will be used.
+        :param delay: The time to pause between requests, in seconds; may be
+                      fractional.
+        """
+        if (ipaddress.ip_address(vip_address).version == 6 and
+                protocol != const.UDP):
+            vip_address = '[{}]'.format(vip_address)
+
+        requests_session = None
+        if persistent:
+            requests_session = requests.Session()
+
+        self._wait_for_lb_functional(
+            vip_address, traffic_member_count, protocol_port, protocol,
+            HTTPS_verify, requests_session=requests_session,
+            source_port=source_port)
+
+        response_counts = {}
+        # Send a number of requests to the LB VIP
+        for i in range(repeat):
+            try:
+                data = self.make_request(
+                    vip_address, protocol=protocol, HTTPS_verify=HTTPS_verify,
+                    protocol_port=protocol_port,
+                    requests_session=requests_session,
+                    client_cert_path=client_cert_path,
+                    CA_certs_path=CA_certs_path, source_port=source_port,
+                    request_timeout=request_timeout)
+
+                if data in response_counts:
+                    response_counts[data] += 1
+                else:
+                    response_counts[data] = 1
+                if delay is not None:
+                    time.sleep(delay)
+            except Exception:
+                LOG.exception('Failed to send request to loadbalancer vip')
+                if persistent:
+                    requests_session.close()
+                raise Exception('Failed to connect to lb')
+        if persistent:
+            requests_session.close()
+        LOG.debug('Loadbalancer response totals: %s', response_counts)
+
+        # Ensure the correct number of members responded
+        self.assertEqual(traffic_member_count, len(response_counts))
+
+        # Ensure both members got the same number of responses
+        self.assertEqual(1, len(set(response_counts.values())))
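The two assertions reduce to a counting argument over the collected response bodies; a toy illustration with repeat=20 and two members:

    # A healthy ROUND_ROBIN run yields an even split:
    response_counts = {'member-a': 10, 'member-b': 10}
    assert len(response_counts) == 2                  # both members replied
    assert len(set(response_counts.values())) == 1    # identical shares

    # {'member-a': 13, 'member-b': 7} fails the equal-shares check, and
    # {'member-a': 20} (a dead member) fails the member-count check.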
+
+    def assertConsistentResponse(self, response, url, method='GET', repeat=10,
+                                 redirect=False, timeout=2,
+                                 expect_connection_error=False, **kwargs):
+        """Assert that a request to URL gets the expected response.
+
+        :param response: Expected response in format (status_code, content).
+        :param url: The URL to request.
+        :param method: The HTTP method to use (GET, POST, PUT, etc)
+        :param repeat: How many times to test the response.
+        :param data: Optional data to send in the request.
+        :param headers: Optional headers to send in the request.
+        :param cookies: Optional cookies to send in the request.
+        :param redirect: Is the request a redirect? If true, assume the passed
+                         content should be the next URL in the chain.
+        :param timeout: Optional seconds to wait for the server to send data.
+        :param expect_connection_error: Should we expect a connection error?
+
+        :return: boolean success status
+
+        :raises: testtools.matchers.MismatchError
+        """
+        session = requests.Session()
+        response_code, response_content = response
+
+        for i in range(repeat):
+            if url.startswith(const.HTTP.lower()):
+                if expect_connection_error:
+                    self.assertRaises(
+                        requests.exceptions.ConnectionError, session.request,
+                        method, url, allow_redirects=not redirect,
+                        timeout=timeout, **kwargs)
+                    continue
+
+                req = session.request(method, url,
+                                      allow_redirects=not redirect,
+                                      timeout=timeout, **kwargs)
+                if response_code:
+                    self.assertEqual(response_code, req.status_code)
+                if redirect:
+                    self.assertTrue(req.is_redirect)
+                    self.assertEqual(response_content,
+                                     session.get_redirect_target(req))
+                elif response_content:
+                    self.assertEqual(str(response_content), req.text)
+            elif url.startswith(const.UDP.lower()):
+                parsed_url = urlparse(url)
+                if expect_connection_error:
+                    self.assertRaises(exceptions.TimeoutException,
+                                      self.make_udp_request,
+                                      parsed_url.hostname,
+                                      port=parsed_url.port, timeout=timeout)
+                    continue
+
+                data = self.make_udp_request(parsed_url.hostname,
+                                             port=parsed_url.port,
+                                             timeout=timeout)
+                self.assertEqual(response_content, data)
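Usage sketches for both branches (the URLs are illustrative):

    # HTTP: a listener that should 301-redirect to its TLS sibling.
    self.assertConsistentResponse((301, 'https://203.0.113.10/'),
                                  'http://203.0.113.10/', redirect=True)

    # UDP: a deleted listener should consistently time out.
    self.assertConsistentResponse((None, None), 'udp://203.0.113.10:8080/',
                                  timeout=2, expect_connection_error=True)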
+
+    def _wait_for_lb_functional(
+            self, vip_address, traffic_member_count, protocol_port, protocol,
+            HTTPS_verify, client_cert_path=None, CA_certs_path=None,
+            request_interval=2, request_timeout=10, requests_session=None,
+            source_port=None):
+        start = time.time()
+        response_counts = {}
+
+        # Send requests to the load balancer until at least
+        # "traffic_member_count" members have replied, ensuring that network
+        # connectivity between the load balancer and the members is functional
+        while time.time() - start < CONF.load_balancer.build_timeout:
+            try:
+                data = self.make_request(
+                    vip_address, protocol=protocol, HTTPS_verify=HTTPS_verify,
+                    protocol_port=protocol_port,
+                    client_cert_path=client_cert_path,
+                    CA_certs_path=CA_certs_path, source_port=source_port,
+                    request_timeout=request_timeout,
+                    requests_session=requests_session)
+
+                if data in response_counts:
+                    response_counts[data] += 1
+                else:
+                    response_counts[data] = 1
+
+                if traffic_member_count == len(response_counts):
+                    LOG.debug('Loadbalancer response totals: %s',
+                              response_counts)
+                    time.sleep(1)
+                    return
+            except Exception:
+                LOG.warning('Server is not passing initial traffic. Waiting.')
+                time.sleep(1)
+
+        LOG.debug('Load balancer response totals while waiting: %s',
+                  response_counts)
+        message = ('Server %s on port %s did not begin passing traffic within '
+                   'the timeout period. Failing test.' % (vip_address,
+                                                          protocol_port))
+        LOG.error(message)
+        raise Exception(message)
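A condensed, self-contained model of the warm-up loop above, with make_request stubbed out, to make the exit condition explicit (the names here are illustrative; the real code also tallies per-member counts rather than a set):

    import time

    def wait_until_members_answer(request, member_count, timeout, poll=1):
        """Return once member_count distinct reply bodies have been seen."""
        seen = set()
        start = time.time()
        while time.time() - start < timeout:
            try:
                seen.add(request())
            except Exception:
                time.sleep(poll)      # LB not passing traffic yet; retry
                continue
            if len(seen) >= member_count:
                time.sleep(poll)      # brief settle delay, as in the patch
                return
        raise Exception('members never began passing traffic')

    # Toy usage with a canned iterator standing in for real requests:
    replies = iter(['member-a', 'member-a', 'member-b'])
    wait_until_members_answer(lambda: next(replies), 2, timeout=30)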
diff --git a/requirements.txt b/requirements.txt
index e5b93b8..8376df6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,6 @@
 python-barbicanclient>=4.5.2 # Apache-2.0
 pyOpenSSL>=17.1.0 # Apache-2.0
 requests>=2.14.2  # Apache-2.0
-six>=1.10.0 # MIT
 tempest>=17.1.0 # Apache-2.0
 tenacity>=4.4.0 # Apache-2.0
 testtools>=2.2.0 # MIT
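six can be dropped because the plugin now targets Python 3 only (see the basepython consolidation in tox.ini below); its last call site was the response comparison in assertConsistentResponse, where the builtin behaves identically:

    # On Python 3, six.text_type was simply an alias for str, so
    #     six.text_type(response_content) == str(response_content)
    # holds for any content type the tests compare against:
    assert str(200) == '200'
    assert str('body') == 'body'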
diff --git a/tox.ini b/tox.ini
index a419c62..86d81b1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,9 +1,11 @@
 [tox]
-minversion = 2.0
+minversion = 3.1
 envlist = pep8
 skipsdist = True
+ignore_basepython_conflict = True
 
 [testenv]
+basepython = python3
 usedevelop = True
 install_command = pip install {opts} {packages}
 setenv =
@@ -16,15 +18,17 @@
   stestr slowest
 
 [testenv:pep8]
-basepython = python3
-commands = flake8
+commands =
+  flake8
+  check-uuid --package octavia_tempest_plugin
+
+[testenv:uuidgen]
+commands = check-uuid --fix --package octavia_tempest_plugin
 
 [testenv:venv]
-basepython = python3
 commands = {posargs}
 
 [testenv:cover]
-basepython = python3
 setenv =
   {[testenv]setenv}
   PYTHON=coverage run --source octavia_tempest_plugin --parallel-mode
@@ -39,7 +43,6 @@
   coverage xml -o cover/coverage.xml
 
 [testenv:docs]
-basepython = python3
 deps =
     -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
     -r{toxinidir}/requirements.txt
@@ -51,7 +54,6 @@
   sphinx-build -W -b html doc/source doc/build/html
 
 [testenv:pdf-docs]
-basepython = python3
 deps = {[testenv:docs]deps}
 whitelist_externals =
   make
@@ -62,7 +64,6 @@
   make -C doc/build/pdf
 
 [testenv:releasenotes]
-basepython = python3
 deps =
     -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
     -r{toxinidir}/requirements.txt
@@ -71,7 +72,6 @@
   sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
 
 [testenv:debug]
-basepython = python3
 commands = oslo_debug_helper {posargs}
 
 [flake8]
@@ -105,9 +105,15 @@
   ./octavia_tempest_plugin/hacking
 
 [testenv:genconfig]
-basepython = python3
 whitelist_externals = mkdir
 commands =
          mkdir -p etc
          oslo-config-generator --output-file etc/octavia.tempest.conf.sample \
          --namespace tempest.config
+
+[testenv:requirements]
+deps =
+  -egit+https://opendev.org/openstack/requirements#egg=openstack-requirements
+whitelist_externals = sh
+commands =
+    sh -c '{envdir}/src/openstack-requirements/playbooks/files/project-requirements-change.py --req {envdir}/src/openstack-requirements --local {toxinidir} master'
diff --git a/zuul.d/jobs.yaml b/zuul.d/jobs.yaml
index 305e5aa..523802e 100644
--- a/zuul.d/jobs.yaml
+++ b/zuul.d/jobs.yaml
@@ -195,6 +195,8 @@
             load_balancer:
               check_interval: 1
               check_timeout: 180
+            loadbalancer-feature-enabled:
+              not_implemented_is_error: True
       devstack_services:
         neutron-qos: true
       devstack_plugins:
@@ -397,6 +399,9 @@
           "$TEMPEST_CONFIG":
             load_balancer:
               test_with_noop: True
+              build_interval: 0.5
+              check_interval: 0.5
+              lb_build_interval: 0.5
               # AZ API tests with no-op need AZs configured but they do not
               # need to actually exist in Nova due to the no-op driver.
               availability_zone: bogus-az-1
@@ -427,7 +432,7 @@
               enabled: True
             audit_middleware_notifications:
               driver: log
-      tempest_concurrency: 2
+      tempest_concurrency: 4
       tempest_test_regex: ^octavia_tempest_plugin.tests.api.v2
       tox_envlist: all
     irrelevant-files:
@@ -580,6 +585,10 @@
       tempest_test_regex: ^octavia_tempest_plugin.tests.scenario.v2
       tox_envlist: all
       devstack_local_conf:
+        post-config:
+            $OCTAVIA_CONF:
+              nova:
+                enable_anti_affinity: True
         test-config:
           "$TEMPEST_CONFIG":
             load_balancer:
@@ -867,3 +876,62 @@
     required-projects:
       - name: openstack/diskimage-builder
         override-checkout: 2.30.0
+
+######### Third party jobs ##########
+
+- job:
+    name: neutron-ovn-provider-v2-api
+    parent: ovn-octavia-provider-v2-dsvm-scenario
+    description: Runs the Octavia API tests against the neutron OVN provider driver.
+    voting: false
+    timeout: 5400
+    attempts: 1
+    tags: ovn-octavia-provider
+    irrelevant-files:
+      - ^.*\.rst$
+      - ^api-ref/.*$
+      - ^doc/.*$
+      - ^etc/.*$
+      - ^releasenotes/.*$
+      - ^octavia/amphorae/.*$
+      - ^octavia/api/drivers/amphora_driver/.*$
+      - ^octavia/compute/.*$
+      - ^octavia/controller/.*$
+      - ^octavia/distributor/.*$
+      - ^octavia/volume/.*$
+      - ^octavia/tests/.*$
+    vars:
+      tempest_test_regex: ^octavia_tempest_plugin.tests.api.v2
+      devstack_local_conf:
+        test-config:
+          "$TEMPEST_CONFIG":
+            loadbalancer-feature-enabled:
+              not_implemented_is_error: False
+
+- job:
+    name: neutron-ovn-provider-v2-scenario
+    parent: ovn-octavia-provider-v2-dsvm-scenario
+    description: Runs the Octavia scenario tests against the neutron OVN provider driver.
+    voting: false
+    timeout: 5400
+    attempts: 1
+    tags: ovn-octavia-provider
+    irrelevant-files:
+      - ^.*\.rst$
+      - ^api-ref/.*$
+      - ^doc/.*$
+      - ^etc/.*$
+      - ^releasenotes/.*$
+      - ^octavia/amphorae/.*$
+      - ^octavia/api/drivers/amphora_driver/.*$
+      - ^octavia/compute/.*$
+      - ^octavia/controller/.*$
+      - ^octavia/distributor/.*$
+      - ^octavia/volume/.*$
+      - ^octavia/tests/.*$
+    vars:
+      devstack_local_conf:
+        test-config:
+          "$TEMPEST_CONFIG":
+            loadbalancer-feature-enabled:
+              not_implemented_is_error: False
diff --git a/zuul.d/projects.yaml b/zuul.d/projects.yaml
index ec11f85..d66f616 100644
--- a/zuul.d/projects.yaml
+++ b/zuul.d/projects.yaml
@@ -44,6 +44,11 @@
             voting: false
         - octavia-v2-dsvm-cinder-amphora:
             voting: false
+        # Third party provider jobs
+        - neutron-ovn-provider-v2-api:
+            voting: false
+        - neutron-ovn-provider-v2-scenario:
+            voting: false
     gate:
       fail-fast: true
       queue: octavia