[TF] Fix tearDownClass failures with the TungstenFabric provider
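
With the TungstenFabric provider, pools created by these tests can be
left behind, and tearDownClass then fails while releasing class-scoped
resources. Register an explicit cleanup_pool callback right after each
pool is created: self.addCleanup in the per-test paths of test_pool.py,
and cls.addClassResourceCleanup in resource_setup of test_listener.py,
where no test instance exists yet.

The registration pattern, as a minimal self-contained sketch (stdlib
unittest stands in for the tempest base class, and FakePoolClient is a
hypothetical stand-in for mem_pool_client):

    import unittest


    class FakePoolClient:
        """Hypothetical stand-in for mem_pool_client."""

        def __init__(self):
            self.pools = set()

        def create_pool(self, pool_id):
            self.pools.add(pool_id)
            return {'id': pool_id}

        def cleanup_pool(self, pool_id, lb_client=None, lb_id=None):
            # The real client also waits for the load balancer to
            # return to ACTIVE; that step is elided here.
            self.pools.discard(pool_id)


    class PoolTest(unittest.TestCase):
        def test_pool_create(self):
            client = FakePoolClient()
            pool = client.create_pool('pool-1')
            # Register cleanup immediately after creation, so the pool
            # is deleted even if a later waiter or assertion fails.
            self.addCleanup(client.cleanup_pool, pool['id'],
                            lb_client=None, lb_id='lb-1')
            self.assertIn(pool['id'], client.pools)


    if __name__ == '__main__':
        unittest.main()
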
Related-PROD: PRODX-28985
Change-Id: I1f8ac0ed267c3c1f8d7f4de2f2e1ace5d78b37f5
diff --git a/octavia_tempest_plugin/tests/api/v2/test_pool.py b/octavia_tempest_plugin/tests/api/v2/test_pool.py
index d9f5f80..c61b19e 100644
--- a/octavia_tempest_plugin/tests/api/v2/test_pool.py
+++ b/octavia_tempest_plugin/tests/api/v2/test_pool.py
@@ -1148,6 +1148,11 @@
                 message = e.resp_body.get('faultstring', message)
             raise testtools.TestCase.skipException(message)
 
+        if CONF.load_balancer.provider == 'tungstenfabric':
+            self.addCleanup(
+                self.mem_pool_client.cleanup_pool,
+                pool[const.ID],
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
@@ -1405,6 +1410,11 @@
                 message = e.resp_body.get('faultstring', message)
             raise testtools.TestCase.skipException(message)
 
+        if CONF.load_balancer.provider == 'tungstenfabric':
+            self.addCleanup(
+                self.mem_pool_client.cleanup_pool,
+                pool[const.ID],
+                lb_client=self.mem_lb_client, lb_id=self.lb_id)
         waiters.wait_for_status(
             self.mem_lb_client.show_loadbalancer, self.lb_id,
             const.PROVISIONING_STATUS, const.ACTIVE,
diff --git a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
index 79d43e7..dd6e3a4 100644
--- a/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
+++ b/octavia_tempest_plugin/tests/scenario/v2/test_listener.py
@@ -90,6 +90,10 @@
         pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
         pool1_id = pool1[const.ID]
 
+        if CONF.load_balancer.provider == 'tungstenfabric':
+            cls.addClassResourceCleanup(
+                cls.mem_pool_client.cleanup_pool, pool1_id,
+                lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
@@ -118,6 +122,10 @@
         pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
         pool2_id = pool2[const.ID]
 
+        if CONF.load_balancer.provider == 'tungstenfabric':
+            cls.addClassResourceCleanup(
+                cls.mem_pool_client.cleanup_pool, pool2_id,
+                lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
         waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
                                 cls.lb_id, const.PROVISIONING_STATUS,
                                 const.ACTIVE,
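
The resource_setup hunks above cannot use instance addCleanup, since no
test instance exists in a classmethod; tempest's addClassResourceCleanup
registers the callback at class scope instead. A minimal sketch of the
same pattern using stdlib unittest's addClassCleanup (Python 3.8+), with
FakePoolClient again a hypothetical stand-in:

    import unittest


    class FakePoolClient:
        """Hypothetical stand-in for mem_pool_client."""

        pools = set()

        @classmethod
        def create_pool(cls, pool_id):
            cls.pools.add(pool_id)
            return {'id': pool_id}

        @classmethod
        def cleanup_pool(cls, pool_id, lb_client=None, lb_id=None):
            cls.pools.discard(pool_id)


    class ListenerTest(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            super().setUpClass()
            for name in ('pool-1', 'pool-2'):
                pool = FakePoolClient.create_pool(name)
                # addClassCleanup is a classmethod, so it is callable
                # here; its callbacks run after tearDownClass, which is
                # when leftover pools previously caused failures.
                cls.addClassCleanup(FakePoolClient.cleanup_pool,
                                    pool['id'], lb_client=None,
                                    lb_id='lb-1')

        def test_pools_created(self):
            self.assertEqual({'pool-1', 'pool-2'},
                             FakePoolClient.pools)


    if __name__ == '__main__':
        unittest.main()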