Add missing cleanups after Octavia resource creation

Several tests created Octavia resources (listeners, pools, members,
L7 policies and health monitors) without registering a cleanup, so
those resources could be left behind if a test failed before deleting
them.

This patch registers the missing cleanups right after each create
call, using addCleanup for resources created inside test methods and
addClassResourceCleanup for class-level resources created during
resource setup.
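
For per-test resources the added cleanup follows this pattern (an
illustrative snippet assembled from the hunks below; the client and
constant names are the plugin's existing ones):

    listener = self.mem_listener_client.create_listener(**listener_kwargs)

    # Register the cleanup immediately so the listener is removed at
    # teardown even if the test fails before its own delete step.
    self.addCleanup(
        self.mem_listener_client.cleanup_listener,
        listener[const.ID],
        lb_client=self.mem_lb_client, lb_id=self.lb_id)

Class-scoped resources (created with cls, e.g. in resource_setup())
use cls.addClassResourceCleanup() with the same arguments instead, so
they are released at class teardown.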

Change-Id: I848e08295580709fa739df2d216dad0efd19a795
diff --git a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
index 5a19def..47b2faa 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_l7policy.py
@@ -62,6 +62,11 @@
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.listener_id = listener[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            cls.listener_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -79,6 +84,11 @@
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
         cls.pool_id = pool[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            cls.pool_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -153,6 +163,11 @@
 
         l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
 
+        self.addCleanup(
+            self.mem_l7policy_client.cleanup_l7policy,
+            l7policy[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -610,6 +625,11 @@
 
         l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
 
+        self.addCleanup(
+            self.mem_l7policy_client.cleanup_l7policy,
+            l7policy[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -721,6 +741,11 @@
 
         l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
 
+        self.addCleanup(
+            self.mem_l7policy_client.cleanup_l7policy,
+            l7policy[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -860,6 +885,11 @@
         }
         l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
 
+        self.addCleanup(
+            self.mem_l7policy_client.cleanup_l7policy,
+            l7policy[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer,
             self.lb_id, const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_listener.py b/octavia_tempest_plugin/tests/api/v2/test_listener.py
index ac6f26a..b60fb20 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_listener.py
@@ -187,6 +187,11 @@
 
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -900,6 +905,11 @@
 
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1068,6 +1078,11 @@
 
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1294,6 +1309,11 @@
         }
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer,
             self.lb_id, const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_member.py b/octavia_tempest_plugin/tests/api/v2/test_member.py
index 305aa39..8b25fc7 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_member.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_member.py
@@ -104,6 +104,11 @@
         cls.current_listener_port += 1
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -133,6 +138,11 @@
                 message = e.resp_body.get('faultstring', message)
             raise testtools.TestCase.skipException(message)
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            pool[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -916,6 +926,11 @@
 
         member = self.mem_member_client.create_member(**member_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1756,6 +1771,11 @@
 
         member = self.mem_member_client.create_member(**member_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -2199,6 +2219,11 @@
 
         member = self.mem_member_client.create_member(**member_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -2642,6 +2667,11 @@
                 const.ID]
         member1 = self.mem_member_client.create_member(**member1_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member1[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id,
                                 const.PROVISIONING_STATUS,
@@ -2674,6 +2704,11 @@
 
         member2 = self.mem_member_client.create_member(**member2_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member2[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id,
                                 const.PROVISIONING_STATUS,
@@ -2947,6 +2982,11 @@
         }
         member = self.mem_member_client.create_member(**member_kwargs)
 
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer,
             self.lb_id, const.PROVISIONING_STATUS,
diff --git a/octavia_tempest_plugin/tests/api/v2/test_pool.py b/octavia_tempest_plugin/tests/api/v2/test_pool.py
index db86ebe..4e3e667 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_pool.py
@@ -359,6 +359,11 @@
             listener = self.mem_listener_client.create_listener(
                 **listener_kwargs)
 
+            self.addCleanup(
+                self.mem_listener_client.cleanup_listener,
+                listener[const.ID],
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
             waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                     self.lb_id, const.PROVISIONING_STATUS,
                                     const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
index 0c23cd7..511c724 100644
--- a/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
+++ b/octavia_tempest_plugin/tests/barbican_scenario/v2/test_tls_barbican.py
@@ -240,6 +240,11 @@
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
         cls.pool_id = pool[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            cls.pool_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -258,7 +263,13 @@
         if cls.lb_member_1_subnet:
             member1_kwargs[const.SUBNET_ID] = cls.lb_member_1_subnet[const.ID]
 
-        cls.mem_member_client.create_member(**member1_kwargs)
+        member1 = cls.mem_member_client.create_member(**member1_kwargs)
+
+        cls.addClassResourceCleanup(
+            cls.mem_member_client.cleanup_member,
+            member1[const.ID], cls.pool_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(
             cls.mem_lb_client.show_loadbalancer, cls.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -277,7 +288,13 @@
         if cls.lb_member_2_subnet:
             member2_kwargs[const.SUBNET_ID] = cls.lb_member_2_subnet[const.ID]
 
-        cls.mem_member_client.create_member(**member2_kwargs)
+        member2 = cls.mem_member_client.create_member(**member2_kwargs)
+
+        cls.addClassResourceCleanup(
+            cls.mem_member_client.cleanup_member,
+            member2[const.ID], cls.pool_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(
             cls.mem_lb_client.show_loadbalancer, cls.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1268,6 +1285,11 @@
         pool = self.mem_pool_client.create_pool(**pool_kwargs)
         pool_id = pool[const.ID]
 
+        self.addCleanup(
+            self.mem_pool_client.cleanup_pool,
+            pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -1286,7 +1308,13 @@
         if self.lb_member_1_subnet:
             member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
 
-        self.mem_member_client.create_member(**member1_kwargs)
+        member1 = self.mem_member_client.create_member(**member1_kwargs)
+
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member1[const.ID], pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1305,7 +1333,13 @@
         if self.lb_member_2_subnet:
             member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
 
-        self.mem_member_client.create_member(**member2_kwargs)
+        member2 = self.mem_member_client.create_member(**member2_kwargs)
+
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member2[const.ID], pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1324,6 +1358,11 @@
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
         self.listener_id = listener[const.ID]
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            self.listener_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -1420,6 +1459,11 @@
         pool = self.mem_pool_client.create_pool(**pool_kwargs)
         pool_id = pool[const.ID]
 
+        self.addCleanup(
+            self.mem_pool_client.cleanup_pool,
+            pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -1472,6 +1516,12 @@
             member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
 
         member1 = self.mem_member_client.create_member(**member1_kwargs)
+
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member1[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1492,6 +1542,12 @@
             member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
 
         member2 = self.mem_member_client.create_member(**member2_kwargs)
+
+        self.addCleanup(
+            self.mem_member_client.cleanup_member,
+            member2[const.ID], pool_id=pool_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1510,6 +1566,11 @@
         listener = self.mem_listener_client.create_listener(**listener_kwargs)
         self.listener_id = listener[const.ID]
 
+        self.addCleanup(
+            self.mem_listener_client.cleanup_listener,
+            self.listener_id,
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
index 8075a35..01548c2 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_healthmonitor.py
@@ -269,6 +269,11 @@
         }
         pool = self.mem_pool_client.create_pool(**pool_kwargs)
 
+        self.addCleanup(
+            self.mem_pool_client.cleanup_pool,
+            pool[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                 self.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -295,6 +300,11 @@
 
         hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
 
+        self.addCleanup(
+            self.mem_healthmonitor_client.cleanup_healthmonitor,
+            hm[const.ID],
+            lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
index 69c1f2b..d4eaa86 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_ipv6_traffic_ops.py
@@ -104,6 +104,11 @@
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.listener_ids[protocol] = listener[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -120,6 +125,11 @@
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
         cls.pool_ids[protocol] = pool[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            pool[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
index acbd094..4c84ba3 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7policy.py
@@ -61,6 +61,11 @@
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.listener_id = listener[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            cls.listener_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -77,6 +82,11 @@
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
         cls.pool_id = pool[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            cls.pool_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
index d5683ef..edbdc33 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_l7rule.py
@@ -61,6 +61,11 @@
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
         cls.listener_id = listener[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            cls.listener_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -76,6 +81,11 @@
         l7policy = cls.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
         cls.l7policy_id = l7policy[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_l7policy_client.cleanup_l7policy,
+            cls.l7policy_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
index 2850ec0..be997aa 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
@@ -76,6 +76,11 @@
         pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
         pool1_id = pool1[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            pool1_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -92,6 +97,11 @@
         pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
         pool2_id = pool2[const.ID]
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            pool2_id,
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
index 6df4c9c..c53e1c2 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_pool.py
@@ -359,6 +359,11 @@
                 **listener_kwargs)
             listener_id = listener[const.ID]
 
+            self.addCleanup(
+                self.mem_listener_client.cleanup_listener,
+                listener_id,
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
+
             waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
                                     self.lb_id, const.PROVISIONING_STATUS,
                                     const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
index e8221fe..a704b88 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_traffic_ops.py
@@ -120,6 +120,11 @@
 
         listener = cls.mem_listener_client.create_listener(**listener_kwargs)
 
+        cls.addClassResourceCleanup(
+            cls.mem_listener_client.cleanup_listener,
+            listener[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -135,6 +140,11 @@
         }
         pool = cls.mem_pool_client.create_pool(**pool_kwargs)
 
+        cls.addClassResourceCleanup(
+            cls.mem_pool_client.cleanup_pool,
+            pool[const.ID],
+            lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
+
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,