Merge "setup.cfg: Replace dashes with underscores"
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000..5f0bf6a
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,3 @@
+sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
+openstackdocstheme>=1.31.2 # Apache-2.0
+reno>=2.5.0 # Apache-2.0
diff --git a/manila_tempest_tests/common/waiters.py b/manila_tempest_tests/common/waiters.py
new file mode 100644
index 0000000..e5ac0e1
--- /dev/null
+++ b/manila_tempest_tests/common/waiters.py
@@ -0,0 +1,176 @@
+# Copyright 2021 Red Hat, Inc.
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import time
+
+import six
+from tempest import config
+from tempest.lib import exceptions
+
+from manila_tempest_tests.services.share.v2.json import shares_client
+from manila_tempest_tests import share_exceptions
+
+CONF = config.CONF
+LATEST_MICROVERSION = CONF.share.max_api_microversion
+
+
+def _get_access_rule(body, rule_id):
+    for rule in body:
+        if rule['id'] in rule_id:
+            return rule
+
+
+def _get_name_of_raise_method(resource_name):
+    if resource_name == 'snapshot_access_rule':
+        return 'AccessRuleBuildErrorException'
+    if resource_name == 'share_replica':
+        return 'ShareInstanceBuildErrorException'
+    resource_name = resource_name.title()
+    name = resource_name.replace('_', '')
+    return name + 'BuildErrorException'
+
+
+def wait_for_resource_status(client, resource_id, status,
+                             resource_name='share', rule_id=None,
+                             status_attr='status',
+                             raise_rule_in_error_state=True,
+                             version=LATEST_MICROVERSION):
+    """Waits for a resource to reach a given status."""
+
+    get_resource_action = {
+        'share': 'get_share',
+        'snapshot': 'get_snapshot',
+        'share_server': 'show_share_server',
+        'share_instance': 'get_share_instance',
+        'snapshot_instance': 'get_snapshot_instance',
+        'access_rule': 'list_access_rules',
+        'snapshot_access_rule': 'get_snapshot_access_rule',
+        'share_group': 'get_share_group',
+        'share_group_snapshot': 'get_share_group_snapshot',
+        'share_replica': 'get_share_replica',
+    }
+
+    # The v2 shares client takes an extra microversion parameter, so build
+    # the method arguments according to the client and resource being polled.
+    resource_action = getattr(client, get_resource_action[resource_name])
+    method_args = [resource_id]
+    method_kwargs = {}
+    if isinstance(client, shares_client.SharesV2Client):
+        method_kwargs.update({'version': version})
+        if resource_name == 'snapshot_access_rule':
+            method_args.insert(1, rule_id)
+    body = resource_action(*method_args, **method_kwargs)
+
+    if resource_name == 'access_rule':
+        status_attr = 'state'
+        body = _get_access_rule(body, rule_id)
+
+    resource_status = body[status_attr]
+    start = int(time.time())
+
+    exp_status = status if isinstance(status, list) else [status]
+    while resource_status not in exp_status:
+        time.sleep(client.build_interval)
+        body = resource_action(*method_args, **method_kwargs)
+
+        if resource_name == 'access_rule':
+            status_attr = 'state'
+            body = _get_access_rule(body, rule_id)
+
+        resource_status = body[status_attr]
+
+        if resource_status in exp_status:
+            return
+        elif 'error' in resource_status.lower() and raise_rule_in_error_state:
+            raise_method = _get_name_of_raise_method(resource_name)
+            resource_exception = getattr(share_exceptions, raise_method)
+            raise resource_exception(resource_id=resource_id)
+        if int(time.time()) - start >= client.build_timeout:
+            message = ('%s %s failed to reach %s status (current %s) '
+                       'within the required time (%s s).' %
+                       (resource_name.replace('_', ' '), resource_id, status,
+                        resource_status, client.build_timeout))
+            raise exceptions.TimeoutException(message)
+
+
+def wait_for_migration_status(client, share_id, dest_host, status_to_wait,
+                              version=LATEST_MICROVERSION):
+    """Waits for a share to migrate to a certain host."""
+    statuses = ((status_to_wait,)
+                if not isinstance(status_to_wait, (tuple, list, set))
+                else status_to_wait)
+    share = client.get_share(share_id, version=version)
+    migration_timeout = CONF.share.migration_timeout
+    start = int(time.time())
+    while share['task_state'] not in statuses:
+        time.sleep(client.build_interval)
+        share = client.get_share(share_id, version=version)
+        if share['task_state'] in statuses:
+            break
+        elif share['task_state'] == 'migration_error':
+            raise share_exceptions.ShareMigrationException(
+                share_id=share['id'], src=share['host'], dest=dest_host)
+        elif int(time.time()) - start >= migration_timeout:
+            message = ('Share %(share_id)s failed to reach a status in '
+                       '%(status)s when migrating from host %(src)s to '
+                       'host %(dest)s within the required time '
+                       '%(timeout)s.' % {
+                           'src': share['host'],
+                           'dest': dest_host,
+                           'share_id': share['id'],
+                           'timeout': migration_timeout,
+                           'status': six.text_type(statuses),
+                       })
+            raise exceptions.TimeoutException(message)
+    return share
+
+
+def wait_for_snapshot_access_rule_deletion(client, snapshot_id, rule_id):
+    rule = client.get_snapshot_access_rule(snapshot_id, rule_id)
+    start = int(time.time())
+
+    while rule is not None:
+        time.sleep(client.build_interval)
+
+        rule = client.get_snapshot_access_rule(snapshot_id, rule_id)
+
+        if rule is None:
+            return
+        if int(time.time()) - start >= client.build_timeout:
+            message = ('The snapshot access rule %(id)s failed to delete '
+                       'within the required time (%(time)ss).' %
+                       {
+                           'time': client.build_timeout,
+                           'id': rule_id,
+                       })
+            raise exceptions.TimeoutException(message)
+
+
+def wait_for_message(client, resource_id):
+    """Waits until a message for a resource with given id exists"""
+    start = int(time.time())
+    message = None
+
+    while not message:
+        time.sleep(client.build_interval)
+        for msg in client.list_messages():
+            if msg['resource_id'] == resource_id:
+                return msg
+
+        if int(time.time()) - start >= client.build_timeout:
+            message = ('No message for resource with id %s was created in'
+                       ' the required time (%s s).' %
+                       (resource_id, client.build_timeout))
+            raise exceptions.TimeoutException(message)
diff --git a/manila_tempest_tests/config.py b/manila_tempest_tests/config.py
index a24a064..7aaf5c9 100644
--- a/manila_tempest_tests/config.py
+++ b/manila_tempest_tests/config.py
@@ -29,7 +29,7 @@
                help="The minimum api microversion is configured to be the "
                     "value of the minimum microversion supported by Manila."),
     cfg.StrOpt("max_api_microversion",
-               default="2.57",
+               default="2.61",
                help="The maximum api microversion is configured to be the "
                     "value of the latest microversion supported by Manila."),
     cfg.StrOpt("region",
diff --git a/manila_tempest_tests/services/share/json/shares_client.py b/manila_tempest_tests/services/share/json/shares_client.py
index 3684cf4..f7b3694 100644
--- a/manila_tempest_tests/services/share/json/shares_client.py
+++ b/manila_tempest_tests/services/share/json/shares_client.py
@@ -218,72 +218,6 @@
         self.expected_success(202, resp.status)
         return body
 
-    def wait_for_share_status(self, share_id, status):
-        """Waits for a share to reach a given status."""
-        body = self.get_share(share_id)
-        share_name = body['name']
-        share_status = body['status']
-        start = int(time.time())
-
-        while share_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_share(share_id)
-            share_status = body['status']
-            if share_status == status:
-                return
-            elif 'error' in share_status.lower():
-                raise share_exceptions.ShareBuildErrorException(
-                    share_id=share_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share %s failed to reach %s status within '
-                           'the required time (%s s).' %
-                           (share_name, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
-    def wait_for_snapshot_status(self, snapshot_id, status):
-        """Waits for a snapshot to reach a given status."""
-        body = self.get_snapshot(snapshot_id)
-        snapshot_name = body['name']
-        snapshot_status = body['status']
-        start = int(time.time())
-
-        while snapshot_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_snapshot(snapshot_id)
-            snapshot_status = body['status']
-            if 'error' in snapshot_status:
-                raise share_exceptions.SnapshotBuildErrorException(
-                    snapshot_id=snapshot_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share Snapshot %s failed to reach %s status '
-                           'within the required time (%s s).' %
-                           (snapshot_name, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
-    def wait_for_access_rule_status(self, share_id, rule_id, status,
-                                    raise_rule_in_error_state=True):
-        """Waits for an access rule to reach a given status."""
-        rule_status = "new"
-        start = int(time.time())
-        while rule_status != status:
-            time.sleep(self.build_interval)
-            rules = self.list_access_rules(share_id)
-            for rule in rules:
-                if rule["id"] in rule_id:
-                    rule_status = rule['state']
-                    break
-            if 'error' in rule_status and raise_rule_in_error_state:
-                raise share_exceptions.AccessRuleBuildErrorException(
-                    rule_id=rule_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share Access Rule %s failed to reach %s status '
-                           'within the required time (%s s).' %
-                           (rule_id, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
     def default_quotas(self, tenant_id):
         resp, body = self.get("os-quota-sets/%s/defaults" % tenant_id)
         self.expected_success(200, resp.status)
diff --git a/manila_tempest_tests/services/share/v2/json/shares_client.py b/manila_tempest_tests/services/share/v2/json/shares_client.py
index c1a0f3c..61f053f 100644
--- a/manila_tempest_tests/services/share/v2/json/shares_client.py
+++ b/manila_tempest_tests/services/share/v2/json/shares_client.py
@@ -17,11 +17,9 @@
 import re
 import time
 
-import six
 from six.moves.urllib import parse
 from tempest import config
 from tempest.lib.common.utils import data_utils
-from tempest.lib import exceptions
 
 from manila_tempest_tests.common import constants
 from manila_tempest_tests.services.share.json import shares_client
@@ -373,54 +371,6 @@
         self.expected_success(200, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_share_instance_status(self, instance_id, status,
-                                       version=LATEST_MICROVERSION):
-        """Waits for a share to reach a given status."""
-        body = self.get_share_instance(instance_id, version=version)
-        instance_status = body['status']
-        start = int(time.time())
-
-        while instance_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_share(instance_id)
-            instance_status = body['status']
-            if instance_status == status:
-                return
-            elif 'error' in instance_status.lower():
-                raise share_exceptions.ShareInstanceBuildErrorException(
-                    id=instance_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share instance %s failed to reach %s status within'
-                           ' the required time (%s s).' %
-                           (instance_id, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
-    def wait_for_share_status(self, share_id, status, status_attr='status',
-                              version=LATEST_MICROVERSION):
-        """Waits for a share to reach a given status."""
-        body = self.get_share(share_id, version=version)
-        share_status = body[status_attr]
-        start = int(time.time())
-
-        exp_status = status if isinstance(status, list) else [status]
-        while share_status not in exp_status:
-            time.sleep(self.build_interval)
-            body = self.get_share(share_id, version=version)
-            share_status = body[status_attr]
-            if share_status in exp_status:
-                return
-            elif 'error' in share_status.lower():
-                raise share_exceptions.ShareBuildErrorException(
-                    share_id=share_id)
-            if int(time.time()) - start >= self.build_timeout:
-                message = ("Share's %(status_attr)s failed to transition to "
-                           "%(status)s within the required "
-                           "time %(seconds)s." %
-                           {"status_attr": status_attr, "status": exp_status,
-                            "seconds": self.build_timeout})
-                raise exceptions.TimeoutException(message)
-
 ###############
 
     def extend_share(self, share_id, new_size, version=LATEST_MICROVERSION,
@@ -566,30 +516,6 @@
         self.expected_success(202, resp.status)
         return body
 
-    def wait_for_snapshot_status(self, snapshot_id, status,
-                                 version=LATEST_MICROVERSION):
-        """Waits for a snapshot to reach a given status."""
-        body = self.get_snapshot(snapshot_id, version=version)
-        snapshot_name = body['name']
-        snapshot_status = body['status']
-        start = int(time.time())
-
-        while snapshot_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_snapshot(snapshot_id, version=version)
-            snapshot_status = body['status']
-            if snapshot_status in status:
-                return
-            if 'error' in snapshot_status:
-                raise (share_exceptions.
-                       SnapshotBuildErrorException(snapshot_id=snapshot_id))
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share Snapshot %s failed to reach %s status '
-                           'within the required time (%s s).' %
-                           (snapshot_name, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
     def manage_snapshot(self, share_id, provider_location,
                         name=None, description=None,
                         version=LATEST_MICROVERSION,
@@ -697,6 +623,15 @@
 
 ###############
 
+    def show_share_server(self, share_server_id, version=LATEST_MICROVERSION):
+        """Get share server info."""
+        uri = "share-servers/%s" % share_server_id
+        resp, body = self.get(uri, version=version)
+        self.expected_success(200, resp.status)
+        return self._parse_resp(body)
+
+###############
+
     def get_snapshot_instance(self, instance_id, version=LATEST_MICROVERSION):
         resp, body = self.get("snapshot-instances/%s" % instance_id,
                               version=version)
@@ -728,35 +663,6 @@
         self.expected_success(202, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_snapshot_instance_status(self, instance_id, expected_status):
-        """Waits for a snapshot instance status to reach a given status."""
-        body = self.get_snapshot_instance(instance_id)
-        instance_status = body['status']
-        start = int(time.time())
-
-        while instance_status != expected_status:
-            time.sleep(self.build_interval)
-            body = self.get_snapshot_instance(instance_id)
-            instance_status = body['status']
-            if instance_status == expected_status:
-                return
-            if 'error' in instance_status:
-                raise share_exceptions.SnapshotInstanceBuildErrorException(
-                    id=instance_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('The status of snapshot instance %(id)s failed to '
-                           'reach %(expected_status)s status within the '
-                           'required time (%(time)ss). Current '
-                           'status: %(current_status)s.' %
-                           {
-                               'expected_status': expected_status,
-                               'time': self.build_timeout,
-                               'id': instance_id,
-                               'current_status': instance_status,
-                           })
-                raise exceptions.TimeoutException(message)
-
     def get_snapshot_instance_export_location(
             self, instance_id, export_location_uuid,
             version=LATEST_MICROVERSION):
@@ -849,7 +755,7 @@
         self.expected_success(202, resp.status)
         return body
 
-    def get_access(self, access_id, version=LATEST_MICROVERSION):
+    def get_access_rule(self, access_id, version=LATEST_MICROVERSION):
         resp, body = self.get("share-access-rules/%s" % access_id,
                               version=version)
         self.expected_success(200, resp.status)
@@ -1171,29 +1077,6 @@
         self.force_delete(share_group_id, s_type='share-groups',
                           headers=headers, version=version)
 
-    def wait_for_share_group_status(self, share_group_id, status):
-        """Waits for a share group to reach a given status."""
-        body = self.get_share_group(share_group_id)
-        sg_name = body['name']
-        sg_status = body['status']
-        start = int(time.time())
-
-        while sg_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_share_group(share_group_id)
-            sg_status = body['status']
-            if 'error' in sg_status and status != 'error':
-                raise share_exceptions.ShareGroupBuildErrorException(
-                    share_group_id=share_group_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                sg_name = sg_name or share_group_id
-                message = ('Share Group %s failed to reach %s status '
-                           'within the required time (%s s). '
-                           'Current status: %s' %
-                           (sg_name, status, self.build_timeout, sg_status))
-                raise exceptions.TimeoutException(message)
-
 ###############
 
     def create_share_group_type(self, name=None, share_types=(),
@@ -1458,28 +1341,6 @@
             share_group_snapshot_id, s_type='share-group-snapshots',
             headers=headers, version=version)
 
-    def wait_for_share_group_snapshot_status(self, share_group_snapshot_id,
-                                             status):
-        """Waits for a share group snapshot to reach a given status."""
-        body = self.get_share_group_snapshot(share_group_snapshot_id)
-        sg_snapshot_name = body['name']
-        sg_snapshot_status = body['status']
-        start = int(time.time())
-
-        while sg_snapshot_status != status:
-            time.sleep(self.build_interval)
-            body = self.get_share_group_snapshot(share_group_snapshot_id)
-            sg_snapshot_status = body['status']
-            if 'error' in sg_snapshot_status and status != 'error':
-                raise share_exceptions.ShareGroupSnapshotBuildErrorException(
-                    share_group_snapshot_id=share_group_snapshot_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('Share Group Snapshot %s failed to reach %s status '
-                           'within the required time (%s s).' %
-                           (sg_snapshot_name, status, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
 ###############
 
     def manage_share_server(self, host, share_network_id, identifier,
@@ -1511,31 +1372,6 @@
         self.expected_success(202, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_share_server_status(self, server_id, status,
-                                     status_attr='status'):
-        """Waits for a share to reach a given status."""
-        body = self.show_share_server(server_id)
-        server_status = body[status_attr]
-        start = int(time.time())
-
-        while server_status != status:
-            time.sleep(self.build_interval)
-            body = self.show_share_server(server_id)
-            server_status = body[status_attr]
-            if server_status in status:
-                return
-            elif constants.STATUS_ERROR in server_status.lower():
-                raise share_exceptions.ShareServerBuildErrorException(
-                    server_id=server_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ("Share server's %(status_attr)s failed to "
-                           "transition to %(status)s within the required "
-                           "time %(seconds)s." %
-                           {"status_attr": status_attr, "status": status,
-                            "seconds": self.build_timeout})
-                raise exceptions.TimeoutException(message)
-
     def share_server_reset_state(self, share_server_id,
                                  status=constants.SERVER_STATE_ACTIVE,
                                  version=LATEST_MICROVERSION):
@@ -1613,37 +1449,6 @@
                          headers=EXPERIMENTAL, extra_headers=True,
                          version=version)
 
-    def wait_for_migration_status(self, share_id, dest_host, status_to_wait,
-                                  version=LATEST_MICROVERSION):
-        """Waits for a share to migrate to a certain host."""
-        statuses = ((status_to_wait,)
-                    if not isinstance(status_to_wait, (tuple, list, set))
-                    else status_to_wait)
-        share = self.get_share(share_id, version=version)
-        migration_timeout = CONF.share.migration_timeout
-        start = int(time.time())
-        while share['task_state'] not in statuses:
-            time.sleep(self.build_interval)
-            share = self.get_share(share_id, version=version)
-            if share['task_state'] in statuses:
-                break
-            elif share['task_state'] == 'migration_error':
-                raise share_exceptions.ShareMigrationException(
-                    share_id=share['id'], src=share['host'], dest=dest_host)
-            elif int(time.time()) - start >= migration_timeout:
-                message = ('Share %(share_id)s failed to reach a status in'
-                           '%(status)s when migrating from host %(src)s to '
-                           'host %(dest)s within the required time '
-                           '%(timeout)s.' % {
-                               'src': share['host'],
-                               'dest': dest_host,
-                               'share_id': share['id'],
-                               'timeout': self.build_timeout,
-                               'status': six.text_type(statuses),
-                           })
-                raise exceptions.TimeoutException(message)
-        return share
-
 ################
 
     def create_share_replica(self, share_id, availability_zone=None,
@@ -1751,38 +1556,6 @@
         self.expected_success(expected_status, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_share_replica_status(self, replica_id, expected_status,
-                                      status_attr='status'):
-        """Waits for a replica's status_attr to reach a given status."""
-        body = self.get_share_replica(replica_id)
-        replica_status = body[status_attr]
-        start = int(time.time())
-
-        while replica_status != expected_status:
-            time.sleep(self.build_interval)
-            body = self.get_share_replica(replica_id)
-            replica_status = body[status_attr]
-            if replica_status == expected_status:
-                return
-            if ('error' in replica_status
-                    and expected_status != constants.STATUS_ERROR):
-                raise share_exceptions.ShareInstanceBuildErrorException(
-                    id=replica_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('The %(status_attr)s of Replica %(id)s failed to '
-                           'reach %(expected_status)s status within the '
-                           'required time (%(time)ss). Current '
-                           '%(status_attr)s: %(current_status)s.' %
-                           {
-                               'status_attr': status_attr,
-                               'expected_status': expected_status,
-                               'time': self.build_timeout,
-                               'id': replica_id,
-                               'current_status': replica_status,
-                           })
-                raise exceptions.TimeoutException(message)
-
     def reset_share_replica_status(self, replica_id,
                                    status=constants.STATUS_AVAILABLE,
                                    version=LATEST_MICROVERSION):
@@ -1893,43 +1666,15 @@
         self.expected_success(202, resp.status)
         return self._parse_resp(body)
 
-    def get_snapshot_access_rule(self, snapshot_id, rule_id):
+    def get_snapshot_access_rule(self, snapshot_id, rule_id,
+                                 version=LATEST_MICROVERSION):
         resp, body = self.get("snapshots/%s/access-list" % snapshot_id,
-                              version=LATEST_MICROVERSION)
+                              version=version)
         body = self._parse_resp(body)
         found_rules = [r for r in body if r['id'] == rule_id]
 
         return found_rules[0] if len(found_rules) > 0 else None
 
-    def wait_for_snapshot_access_rule_status(self, snapshot_id, rule_id,
-                                             expected_state='active'):
-        rule = self.get_snapshot_access_rule(snapshot_id, rule_id)
-        state = rule['state']
-        start = int(time.time())
-
-        while state != expected_state:
-            time.sleep(self.build_interval)
-            rule = self.get_snapshot_access_rule(snapshot_id, rule_id)
-            state = rule['state']
-            if state == expected_state:
-                return
-            if 'error' in state:
-                raise share_exceptions.AccessRuleBuildErrorException(
-                    snapshot_id)
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('The status of snapshot access rule %(id)s failed '
-                           'to reach %(expected_state)s state within the '
-                           'required time (%(time)ss). Current '
-                           'state: %(current_state)s.' %
-                           {
-                               'expected_state': expected_state,
-                               'time': self.build_timeout,
-                               'id': rule_id,
-                               'current_state': state,
-                           })
-                raise exceptions.TimeoutException(message)
-
     def delete_snapshot_access_rule(self, snapshot_id, rule_id):
         body = {
             "deny_access": {
@@ -1941,26 +1686,6 @@
         self.expected_success(202, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_snapshot_access_rule_deletion(self, snapshot_id, rule_id):
-        rule = self.get_snapshot_access_rule(snapshot_id, rule_id)
-        start = int(time.time())
-
-        while rule is not None:
-            time.sleep(self.build_interval)
-
-            rule = self.get_snapshot_access_rule(snapshot_id, rule_id)
-
-            if rule is None:
-                return
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('The snapshot access rule %(id)s failed to delete '
-                           'within the required time (%(time)ss).' %
-                           {
-                               'time': self.build_timeout,
-                               'id': rule_id,
-                           })
-                raise exceptions.TimeoutException(message)
-
     def get_snapshot_export_location(self, snapshot_id, export_location_uuid,
                                      version=LATEST_MICROVERSION):
         resp, body = self.get(
@@ -2001,23 +1726,6 @@
         self.expected_success(204, resp.status)
         return self._parse_resp(body)
 
-    def wait_for_message(self, resource_id):
-        """Waits until a message for a resource with given id exists"""
-        start = int(time.time())
-        message = None
-
-        while not message:
-            time.sleep(self.build_interval)
-            for msg in self.list_messages():
-                if msg['resource_id'] == resource_id:
-                    return msg
-
-            if int(time.time()) - start >= self.build_timeout:
-                message = ('No message for resource with id %s was created in'
-                           ' the required time (%s s).' %
-                           (resource_id, self.build_timeout))
-                raise exceptions.TimeoutException(message)
-
 ###############
 
     def create_security_service(self, ss_type="ldap",
diff --git a/manila_tempest_tests/share_exceptions.py b/manila_tempest_tests/share_exceptions.py
index 9466afe..efa61b5 100644
--- a/manila_tempest_tests/share_exceptions.py
+++ b/manila_tempest_tests/share_exceptions.py
@@ -17,33 +17,34 @@
 
 
 class ShareBuildErrorException(exceptions.TempestException):
-    message = "Share %(share_id)s failed to build and is in ERROR status"
+    message = "Share %(resource_id)s failed to build and is in ERROR status"
 
 
 class ShareInstanceBuildErrorException(exceptions.TempestException):
-    message = "Share instance %(id)s failed to build and is in ERROR status"
+    message = ("Share instance %(resource_id)s failed to build and is in "
+               "ERROR status")
 
 
 class ShareGroupBuildErrorException(exceptions.TempestException):
-    message = ("Share group %(share_group_id)s failed to build and "
+    message = ("Share group %(resource_id)s failed to build and "
                "is in ERROR status")
 
 
 class AccessRuleBuildErrorException(exceptions.TempestException):
-    message = "Share's rule with id %(rule_id)s is in ERROR status"
+    message = "Share's rule with id %(resource_id)s is in ERROR status"
 
 
 class SnapshotBuildErrorException(exceptions.TempestException):
-    message = "Snapshot %(snapshot_id)s failed to build and is in ERROR status"
+    message = "Snapshot %(resource_id)s failed to build and is in ERROR status"
 
 
 class SnapshotInstanceBuildErrorException(exceptions.TempestException):
-    message = ("Snapshot instance %(id)s failed to build and is in "
+    message = ("Snapshot instance %(resource_id)s failed to build and is in "
                "ERROR status.")
 
 
 class ShareGroupSnapshotBuildErrorException(exceptions.TempestException):
-    message = ("Share Group Snapshot %(share_group_snapshot_id)s failed "
+    message = ("Share Group Snapshot %(resource_id)s failed "
                "to build and is in ERROR status")
 
 
diff --git a/manila_tempest_tests/tests/api/admin/test_admin_actions.py b/manila_tempest_tests/tests/api/admin/test_admin_actions.py
index 57b97e3..e0c49e7 100644
--- a/manila_tempest_tests/tests/api/admin/test_admin_actions.py
+++ b/manila_tempest_tests/tests/api/admin/test_admin_actions.py
@@ -19,7 +19,9 @@
 import testtools
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -39,22 +41,20 @@
         # create share
         cls.sh = cls.create_share(share_type_id=cls.share_type_id)
 
-    def _wait_for_resource_status(self, resource_id, resource_type):
-        wait_for_resource_status = getattr(
-            self.shares_v2_client, "wait_for_{}_status".format(resource_type))
-        wait_for_resource_status(resource_id, "available")
-
     def _reset_resource_available(self, resource_id, resource_type="shares"):
         self.shares_v2_client.reset_state(
             resource_id, s_type=resource_type, status="available")
-        self._wait_for_resource_status(resource_id, resource_type[:-1])
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, resource_id, "available",
+            resource_name=resource_type[:-1])
 
     @decorators.idempotent_id('4f8c6ae9-0656-445f-a911-fbf98fe761d0')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     @ddt.data("error", "available", "error_deleting", "deleting", "creating")
     def test_reset_share_state(self, status):
         self.shares_v2_client.reset_state(self.sh["id"], status=status)
-        self.shares_v2_client.wait_for_share_status(self.sh["id"], status)
+        waiters.wait_for_resource_status(self.shares_v2_client,
+                                         self.sh["id"], status)
         self.addCleanup(self._reset_resource_available, self.sh["id"])
 
     @decorators.idempotent_id('13075b2d-fe83-41bf-b6ef-99cfcc00257d')
@@ -66,8 +66,9 @@
         share_instance_id = sh_instance["id"]
         self.shares_v2_client.reset_state(
             share_instance_id, s_type="share_instances", status=status)
-        self.shares_v2_client.wait_for_share_instance_status(
-            share_instance_id, status)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_instance_id, status,
+            resource_name='share_instance')
         self.addCleanup(self._reset_resource_available,
                         share_instance_id, "share_instances")
 
@@ -80,8 +81,9 @@
         snapshot = self.create_snapshot_wait_for_active(self.sh["id"])
         self.shares_v2_client.reset_state(
             snapshot["id"], s_type="snapshots", status=status)
-        self.shares_v2_client.wait_for_snapshot_status(
-            snapshot["id"], status)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, snapshot["id"], status,
+            resource_name='snapshot')
         self.addCleanup(self._reset_resource_available,
                         snapshot["id"], "snapshots")
 
@@ -147,9 +149,23 @@
 
     @decorators.idempotent_id('49a576eb-733a-4299-aa6f-918fe7c67a6a')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_reset_share_task_state(self):
         for task_state in self.task_states:
             self.shares_v2_client.reset_task_state(self.sh["id"], task_state)
-            self.shares_v2_client.wait_for_share_status(
-                self.sh["id"], task_state, 'task_state')
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.sh["id"], task_state,
+                status_attr='task_state')
+
+    @decorators.idempotent_id('4233b941-a909-4f35-9ec9-753736949dd2')
+    @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
+    def test_ensure_share_server_creation_when_dhss_enabled(self):
+        # Ensure that a share server is created for the share when the
+        # driver runs with "driver handles share servers" enabled, and
+        # that no share server is created otherwise.
+        share_get = self.admin_shares_v2_client.get_share(self.sh['id'])
+        share_server = share_get['share_server_id']
+        if CONF.share.multitenancy_enabled:
+            self.assertNotEmpty(share_server)
+        else:
+            self.assertEmpty(share_server)
diff --git a/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py b/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py
index dd64af5..82f59cb 100644
--- a/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_admin_actions_negative.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -143,7 +144,7 @@
 
     @decorators.idempotent_id('d662457c-2b84-4f13-aee7-5ffafe2552f1')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_reset_task_state_invalid_state(self):
         self.assertRaises(
             lib_exc.BadRequest, self.admin_client.reset_task_state,
@@ -168,7 +169,7 @@
 
     @decorators.idempotent_id('aba8638c-bfed-4c3e-994b-5309fcd912b2')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_reset_task_state_share_not_found(self):
         self.assertRaises(
             lib_exc.NotFound, self.admin_client.reset_task_state,
diff --git a/manila_tempest_tests/tests/api/admin/test_migration.py b/manila_tempest_tests/tests/api/admin/test_migration.py
index 25e7213..9269c71 100644
--- a/manila_tempest_tests/tests/api/admin/test_migration.py
+++ b/manila_tempest_tests/tests/api/admin/test_migration.py
@@ -22,6 +22,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -113,15 +114,16 @@
         self.shares_v2_client.create_access_rule(
             share['id'], access_to="50.50.50.50", access_level="rw")
 
-        self.shares_v2_client.wait_for_share_status(
+        waiters.wait_for_resource_status(
+            self.shares_v2_client,
             share['id'], constants.RULE_STATE_ACTIVE,
             status_attr='access_rules_status')
 
         self.shares_v2_client.create_access_rule(
             share['id'], access_to="51.51.51.51", access_level="ro")
 
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], constants.RULE_STATE_ACTIVE,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.RULE_STATE_ACTIVE,
             status_attr='access_rules_status')
 
         dest_pool = dest_pool['name']
@@ -235,15 +237,15 @@
         if resize == 'extend':
             new_size = CONF.share.share_size + 2
             self.shares_v2_client.extend_share(share['id'], new_size)
-            self.shares_v2_client.wait_for_share_status(
-                share['id'], constants.STATUS_AVAILABLE)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
             share = self.shares_v2_client.get_share(share["id"])
             self.assertEqual(new_size, int(share["size"]))
         else:
             new_size = CONF.share.share_size
             self.shares_v2_client.shrink_share(share['id'], new_size)
-            self.shares_v2_client.wait_for_share_status(
-                share['id'], constants.STATUS_AVAILABLE)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
             share = self.shares_v2_client.get_share(share["id"])
             self.assertEqual(new_size, int(share["size"]))
 
@@ -364,7 +366,7 @@
 
     @decorators.idempotent_id('d39dfa1b-6e91-4efc-84f1-76f878b51f2a')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data(True, False)
     def test_migration_cancel(self, force_host_assisted):
         self._check_migration_enabled(force_host_assisted)
@@ -402,7 +404,7 @@
 
     @decorators.idempotent_id('640dce56-2084-488d-8dea-456840ff397e')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(
         CONF.share.run_snapshot_tests, 'Snapshot tests are disabled.')
     @testtools.skipUnless(
@@ -438,7 +440,7 @@
 
     @decorators.idempotent_id('d8cce50d-e8da-4fbc-8f94-0827bf277b6c')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data(True, False)
     def test_migration_opposite_driver_modes(self, force_host_assisted):
         self._check_migration_enabled(force_host_assisted)
@@ -506,7 +508,7 @@
 
     @decorators.idempotent_id('e6cf0e4d-bdf3-49c1-b6ba-56d1ad6c81d2')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data(True, False)
     def test_migration_2phase(self, force_host_assisted):
         self._check_migration_enabled(force_host_assisted)
@@ -558,7 +560,7 @@
 
     @decorators.idempotent_id('0e3d75e0-385a-4f7a-889f-2a3db79db8c2')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(
         CONF.share.run_extend_tests, 'Extend share tests are disabled.')
     @ddt.data(True, False)
@@ -572,7 +574,7 @@
 
     @decorators.idempotent_id('58c72e51-d217-48bc-8155-5a010912312e')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(
         CONF.share.run_shrink_tests, 'Shrink share tests are disabled.')
     @ddt.data(True, False)
@@ -586,7 +588,7 @@
 
     @decorators.idempotent_id('a95eb701-626a-4175-967b-4880d3716857')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(
         CONF.share.run_snapshot_tests, 'Snapshot tests are disabled.')
     @testtools.skipUnless(
@@ -628,7 +630,7 @@
 
     @decorators.idempotent_id('a18b3637-2070-4a1b-acd9-c392eb7963b5')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
                           'Snapshot tests are disabled.')
     @ddt.data(True, False)
@@ -641,7 +643,7 @@
 
     @decorators.idempotent_id('59313673-6576-4163-ab96-41bafcdad63a')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
                           'Snapshot tests are disabled.')
     @ddt.data(True, False)
diff --git a/manila_tempest_tests/tests/api/admin/test_migration_negative.py b/manila_tempest_tests/tests/api/admin/test_migration_negative.py
index 12e3fb0..cff14f0 100644
--- a/manila_tempest_tests/tests/api/admin/test_migration_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_migration_negative.py
@@ -23,6 +23,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -82,7 +83,7 @@
 
     @decorators.idempotent_id('8aa1f2a0-bc44-4df5-a556-161590e594a3')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_cancel_invalid(self):
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migration_cancel,
@@ -90,18 +91,19 @@
 
     @decorators.idempotent_id('6d0dfb2e-51a0-4cb7-8c69-6135a49c6057')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_get_progress_None(self):
         self.shares_v2_client.reset_task_state(self.share["id"], None)
-        self.shares_v2_client.wait_for_share_status(
-            self.share["id"], None, 'task_state')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share["id"], None,
+            status_attr='task_state')
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migration_get_progress,
             self.share['id'])
 
     @decorators.idempotent_id('2ab1fc82-bc13-4c99-8324-c6b23530e8a4')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_complete_invalid(self):
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migration_complete,
@@ -109,7 +111,7 @@
 
     @decorators.idempotent_id('8ef562b4-7704-4a78-973f-9bf8d2b6f6a6')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_cancel_not_found(self):
         self.assertRaises(
             lib_exc.NotFound, self.shares_v2_client.migration_cancel,
@@ -117,7 +119,7 @@
 
     @decorators.idempotent_id('044c792b-63e0-42c3-9f44-dc2280e2af08')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_get_progress_not_found(self):
         self.assertRaises(
             lib_exc.NotFound, self.shares_v2_client.migration_get_progress,
@@ -125,7 +127,7 @@
 
     @decorators.idempotent_id('a509871a-3f3a-4618-bb60-9661732dd371')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.22")
+    @utils.skip_if_microversion_lt("2.22")
     def test_migration_complete_not_found(self):
         self.assertRaises(
             lib_exc.NotFound, self.shares_v2_client.migration_complete,
@@ -133,7 +135,7 @@
 
     @decorators.idempotent_id('6276bea6-6939-4569-930f-218d99c0fa56')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
                           "Snapshot tests are disabled.")
     def test_migrate_share_with_snapshot(self):
@@ -148,7 +150,7 @@
 
     @decorators.idempotent_id('78670c24-c4ee-45b5-b166-2d053c333144')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data(True, False)
     def test_migrate_share_same_host(self, specified):
         new_share_type_id = None
@@ -166,7 +168,7 @@
 
     @decorators.idempotent_id('af17204f-ffab-4ba8-8cb6-032e49216f67')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_host_invalid(self):
         self.assertRaises(
             lib_exc.NotFound, self.shares_v2_client.migrate_share,
@@ -174,7 +176,7 @@
 
     @decorators.idempotent_id('0558e9c4-0416-41d2-b28a-803d4b81521a')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data({'writable': False, 'preserve_metadata': False,
                'preserve_snapshots': False, 'nondisruptive': True},
               {'writable': False, 'preserve_metadata': False,
@@ -196,7 +198,7 @@
 
     @decorators.idempotent_id('ee57024c-d00e-4def-8eec-cbc62bae327f')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_change_type_no_valid_host(self):
         if not CONF.share.multitenancy_enabled:
             new_share_network_id = self.create_share_network(
@@ -209,13 +211,13 @@
             self.share['id'], self.dest_pool,
             new_share_type_id=self.new_type_invalid['share_type']['id'],
             new_share_network_id=new_share_network_id)
-        self.shares_v2_client.wait_for_migration_status(
-            self.share['id'], self.dest_pool,
+        waiters.wait_for_migration_status(
+            self.shares_v2_client, self.share['id'], self.dest_pool,
             constants.TASK_STATE_MIGRATION_ERROR)
 
     @decorators.idempotent_id('e2bd0cca-c091-4785-a9dc-7f42d2bb95a5')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_not_found(self):
         self.assertRaises(
             lib_exc.NotFound, self.shares_v2_client.migrate_share,
@@ -223,23 +225,24 @@
 
     @decorators.idempotent_id('86b427a7-27c0-4cd5-8f52-9688b339980b')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_not_available(self):
         self.shares_client.reset_state(self.share['id'],
                                        constants.STATUS_ERROR)
-        self.shares_client.wait_for_share_status(self.share['id'],
-                                                 constants.STATUS_ERROR)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share['id'], constants.STATUS_ERROR)
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migrate_share,
             self.share['id'], self.dest_pool)
         self.shares_client.reset_state(self.share['id'],
                                        constants.STATUS_AVAILABLE)
-        self.shares_client.wait_for_share_status(self.share['id'],
-                                                 constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share['id'],
+            constants.STATUS_AVAILABLE)
 
     @decorators.idempotent_id('e8f1e491-697a-4941-bf51-4d37f0a93fa5')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_invalid_share_network(self):
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migrate_share,
@@ -248,7 +251,7 @@
 
     @decorators.idempotent_id('be262d44-2ca2-4b9c-be3a-5a6a98ed871b')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_invalid_share_type(self):
         self.assertRaises(
             lib_exc.BadRequest, self.shares_v2_client.migrate_share,
@@ -257,7 +260,7 @@
 
     @decorators.idempotent_id('16c72693-6f9e-4cb4-a166-c60accd3479b')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migrate_share_opposite_type_share_network_invalid(self):
 
         extra_specs = utils.get_configured_extra_specs(
@@ -283,7 +286,7 @@
 
     @decorators.idempotent_id('1f529b09-e404-4f0e-9423-bb4b117b5522')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.48")
+    @utils.skip_if_microversion_lt("2.48")
     def test_share_type_azs_share_migrate_unsupported_az(self):
         extra_specs = self.add_extra_specs_to_dict({
             'availability_zones': 'non-existent az'})
@@ -299,13 +302,13 @@
     @testtools.skipUnless(CONF.share.run_driver_assisted_migration_tests,
                           "Driver-assisted migration tests are disabled.")
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_create_snapshot_during_share_migration(self):
         self._test_share_actions_during_share_migration('create_snapshot', [])
 
     @decorators.idempotent_id('20121039-bb11-45d8-9972-d2daff7a779c')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @ddt.data(('extend_share', [CONF.share.share_size + 2]),
               ('shrink_share', [CONF.share.share_size]))
     @ddt.unpack
@@ -323,7 +326,7 @@
 
     @decorators.idempotent_id('6e83fc25-4e3e-49a7-93e8-db4e6b355a91')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_add_access_rule_during_migration(self):
         access_type = "ip"
         access_to = "50.50.50.50"
diff --git a/manila_tempest_tests/tests/api/admin/test_multi_backend.py b/manila_tempest_tests/tests/api/admin/test_multi_backend.py
index cfa9baa..11feb5e 100644
--- a/manila_tempest_tests/tests/api/admin/test_multi_backend.py
+++ b/manila_tempest_tests/tests/api/admin/test_multi_backend.py
@@ -26,6 +26,15 @@
 
 class ShareMultiBackendTest(base.BaseSharesAdminTest):
 
+    @staticmethod
+    def _share_protocol(protocol):
+        protocols_list = protocol.lower().split('_')
+        allowed_protocols = [
+            i for i in CONF.share.enable_protocols
+            if i.lower() in protocols_list
+        ]
+        return allowed_protocols
+
     @classmethod
     def resource_setup(cls):
         super(ShareMultiBackendTest, cls).resource_setup()
@@ -41,18 +50,33 @@
         cls.shares = []
         share_data_list = []
 
+        pools = cls.shares_v2_client.list_pools(detail=True)['pools']
+        backends_protocols = {
+            pool['capabilities']['share_backend_name']: pool[
+                'capabilities']['storage_protocol'] for pool in pools
+        }
         # Create share types
-        for i in [0, 1]:
-            st_name = data_utils.rand_name("share-type-%s" % str(i))
+        for backend in CONF.share.backend_names:
+            share_protocol = cls._share_protocol(backends_protocols[backend])
+            if not share_protocol:
+                continue
+            st_name = data_utils.rand_name(
+                cls.__name__ + "-share-type-%s" % backend)
             extra_specs = {
-                "share_backend_name": CONF.share.backend_names[i],
+                "share_backend_name": backend,
+                "driver_handles_share_servers":
+                    CONF.share.multitenancy_enabled,
             }
-            st = cls.create_share_type(
-                name=st_name,
-                extra_specs=cls.add_extra_specs_to_dict(extra_specs))
+            st = cls.create_share_type(name=st_name, extra_specs=extra_specs)
             cls.sts.append(st["share_type"])
             st_id = st["share_type"]["id"]
-            share_data_list.append({"kwargs": {"share_type_id": st_id}})
+            share_data_list.append({"kwargs": {
+                "share_type_id": st_id,
+                "share_protocol": share_protocol[0]}})
+
+        if not share_data_list:
+            raise cls.skipException("Enabled protocols not supported by any "
+                                    "of the enabled backends.")
 
         # Create shares using precreated share types
         cls.shares = cls.create_shares(share_data_list)
@@ -69,20 +93,21 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     def test_share_share_type(self):
         # Share type should be the same as provided with share creation
-        for i in [0, 1]:
-            get = self.shares_v2_client.get_share(self.shares[i]['id'],
-                                                  version="2.5")
-            self.assertEqual(self.sts[i]["name"], get["share_type"])
+        for share, share_type in zip(self.shares, self.sts):
+            share_details = self.shares_v2_client.get_share(
+                share['id'], version="2.5")
+            self.assertEqual(share_type["name"], share_details["share_type"])
 
     @decorators.idempotent_id('f25e0cb0-d656-4f16-a761-ec23992cd9e7')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     def test_share_share_type_v_2_6(self):
         # Share type should be the same as provided with share creation
-        for i in [0, 1]:
-            get = self.shares_v2_client.get_share(self.shares[i]['id'],
-                                                  version="2.6")
-            self.assertEqual(self.sts[i]["id"], get["share_type"])
-            self.assertEqual(self.sts[i]["name"], get["share_type_name"])
+        for share, share_type in zip(self.shares, self.sts):
+            share_details = self.shares_v2_client.get_share(
+                share['id'], version="2.6")
+            self.assertEqual(share_type["id"], share_details["share_type"])
+            self.assertEqual(
+                share_type["name"], share_details["share_type_name"])
 
     @decorators.idempotent_id('bfa0c056-0a15-40e1-bdff-f1e10b95736c')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
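
For reference, the new backend filtering works off the pool capabilities reported by list_pools(detail=True): each pool advertises a share_backend_name and a storage_protocol, and _share_protocol() keeps only the configured protocols that the backend's (possibly underscore-separated, multi-protocol) value contains. A small self-contained illustration with invented pool data and an assumed enabled-protocol list:

    # Invented pool listing, shaped like the capabilities used above.
    pools = [
        {'capabilities': {'share_backend_name': 'alpha',
                          'storage_protocol': 'NFS_CIFS'}},
        {'capabilities': {'share_backend_name': 'beta',
                          'storage_protocol': 'CEPHFS'}},
    ]
    backends_protocols = {
        pool['capabilities']['share_backend_name']: pool[
            'capabilities']['storage_protocol'] for pool in pools
    }

    # Stand-in for CONF.share.enable_protocols.
    enable_protocols = ['nfs', 'cephfs']


    def _share_protocol(protocol):
        # Split multi-protocol values such as 'NFS_CIFS' and keep only
        # the protocols that are enabled in the test configuration.
        protocols_list = protocol.lower().split('_')
        return [p for p in enable_protocols if p.lower() in protocols_list]


    assert _share_protocol(backends_protocols['alpha']) == ['nfs']
    assert _share_protocol(backends_protocols['beta']) == ['cephfs']
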
diff --git a/manila_tempest_tests/tests/api/admin/test_quotas.py b/manila_tempest_tests/tests/api/admin/test_quotas.py
index 25f43a4..481f7c4 100644
--- a/manila_tempest_tests/tests/api/admin/test_quotas.py
+++ b/manila_tempest_tests/tests/api/admin/test_quotas.py
@@ -98,7 +98,7 @@
 
     @decorators.idempotent_id('2e98a13e-b2ed-4977-bafe-47ea48b504f2')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
     def test_show_sg_quotas_using_too_old_microversion(self):
         quotas = self.client.show_quotas(
             self.tenant_id, version=PRE_SHARE_GROUPS_MICROVERSION)
@@ -108,7 +108,7 @@
 
     @decorators.idempotent_id('b8bcbc04-68fb-4c8f-9f4c-a3b6c6b8911c')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
     def test_show_sg_quotas_for_user_using_too_old_microversion(self):
         quotas = self.client.show_quotas(
             self.tenant_id, self.user_id,
@@ -119,7 +119,7 @@
 
     @decorators.idempotent_id('19fe431b-e83e-4c4e-acb8-018d7a470c8b')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(
+    @utils.skip_if_microversion_not_supported(
         PRE_SHARE_REPLICA_QUOTAS_MICROVERSION)
     def test_show_replica_quotas_for_user_using_too_old_microversion(self):
         quotas = self.client.show_quotas(
@@ -136,7 +136,7 @@
     @ddt.unpack
     @decorators.idempotent_id('836e1725-2853-4d54-b281-8173773d8527')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_show_share_type_quotas(self, share_type_key, is_st_public):
         # Check if the used microversion supports 'share_replica' and
         # 'replica_gigabytes' quotas
@@ -279,7 +279,8 @@
     @ddt.unpack
     @decorators.idempotent_id('af16dc89-c93d-43de-8902-2c88c75f107f')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(SHARE_REPLICA_QUOTAS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(
+        SHARE_REPLICA_QUOTAS_MICROVERSION)
     def test_update_user_quota_replica_related(self, quota_key, use_user_id):
         kwargs = {}
 
@@ -304,7 +305,7 @@
     @ddt.unpack
     @decorators.idempotent_id('155ea3de-b3b5-4aa0-be8b-eebcc19ce874')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_update_share_type_quota(self, share_type_key, is_st_public):
         # Check if the used microversion supports 'share_replica' and
         # 'replica_gigabytes' quotas
@@ -547,7 +548,7 @@
     @ddt.unpack
     @decorators.idempotent_id('15e57302-5a14-4be4-8720-95b639c2bfad')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_reset_share_type_quotas(self, share_type_key, is_st_public):
         share_type = self._create_share_type(is_public=is_st_public)
         quota_keys = ['shares', 'snapshots', 'gigabytes', 'snapshot_gigabytes']
@@ -766,7 +767,7 @@
     @ddt.data(11, -1)
     @decorators.idempotent_id('315cb76f-920d-4cb9-ac7d-16be8e95e1b2')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_update_share_type_quotas_bigger_than_project_quota(self, st_q):
         share_type = self._create_share_type()
 
@@ -782,7 +783,7 @@
 
     @decorators.idempotent_id('c95be1eb-6331-4c37-9fac-ed6c36270457')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_set_share_type_quota_bigger_than_users_quota(self):
         share_type = self._create_share_type()
 
@@ -804,7 +805,7 @@
 
     @decorators.idempotent_id('4687eb25-17b3-4995-ace2-62f8bda29c57')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_quotas_usages(self):
         # Create share types
         st_1, st_2 = (self._create_share_type()
@@ -899,7 +900,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     @testtools.skipUnless(
         CONF.share.run_share_group_tests, 'Share Group tests disabled.')
-    @base.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION)
     def test_share_group_quotas_usages(self):
         # Set quotas for project (3 SG, 1 SGS) and user (2 SG, 1 SGS)
         self.update_quotas(self.tenant_id,
diff --git a/manila_tempest_tests/tests/api/admin/test_quotas_negative.py b/manila_tempest_tests/tests/api/admin/test_quotas_negative.py
index e1e033f..af5cc9c 100644
--- a/manila_tempest_tests/tests/api/admin/test_quotas_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_quotas_negative.py
@@ -262,7 +262,7 @@
     @ddt.unpack
     @decorators.idempotent_id('ed38ab0a-694c-48ea-bce5-5c264f485d5b')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported("2.7")
+    @utils.skip_if_microversion_not_supported("2.7")
     def test_show_quotas_with_wrong_versions(self, url, version, method_name):
         self.assertRaises(lib_exc.NotFound,
                           getattr(self.client, method_name),
@@ -285,7 +285,7 @@
     @ddt.data('show', 'reset', 'update')
     @decorators.idempotent_id('cf45eb7d-7330-4b2d-8214-e4149eb4a398')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_share_type_quotas_using_nonexistent_share_type(self, op):
 
         kwargs = {"share_type": "fake_nonexistent_share_type"}
@@ -301,7 +301,7 @@
     @ddt.data('id', 'name')
     @decorators.idempotent_id('2ba641a1-100b-417e-80e2-d3f717fd3c7c')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_try_update_share_type_quota_for_share_networks(self, key):
         share_type = self._create_share_type()
         tenant_quotas = self.client.show_quotas(self.tenant_id)
@@ -316,7 +316,7 @@
     @ddt.data('share_groups', 'share_group_snapshots')
     @decorators.idempotent_id('5eb6ce15-1172-4bcb-9c7b-91543bf714e8')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_lt(SHARE_GROUPS_MICROVERSION)
     def test_try_update_share_type_quota_for_share_groups(self, quota_name):
         share_type = self._create_share_type()
         tenant_quotas = self.client.show_quotas(self.tenant_id)
@@ -330,8 +330,8 @@
     @ddt.data('share_groups', 'share_group_snapshots')
     @decorators.idempotent_id('1b504c74-2ce9-40f6-87fb-9e643b1b5906')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
-    @base.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(PRE_SHARE_GROUPS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(SHARE_GROUPS_MICROVERSION)
     def test_share_group_quotas_using_too_old_microversion(self, quota_key):
         tenant_quotas = self.client.show_quotas(
             self.tenant_id, version=SHARE_GROUPS_MICROVERSION)
@@ -348,7 +348,8 @@
     @ddt.data("share_replicas", "replica_gigabytes")
     @decorators.idempotent_id('66f22d42-37bc-4f9b-8e0b-a679341e1e88')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(SHARE_REPLICA_QUOTAS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(
+        SHARE_REPLICA_QUOTAS_MICROVERSION)
     def test_share_replica_quotas_using_too_old_microversion(self, quota_key):
         tenant_quotas = self.client.show_quotas(
             self.tenant_id, version=SHARE_REPLICA_QUOTAS_MICROVERSION)
@@ -365,7 +366,7 @@
     @ddt.data('show', 'reset', 'update')
     @decorators.idempotent_id('acc609c2-f314-4540-984c-33e93d048f6c')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.38")
+    @utils.skip_if_microversion_lt("2.38")
     def test_share_type_quotas_using_too_old_microversion(self, op):
         share_type = self._create_share_type()
         kwargs = {"version": "2.38", "share_type": share_type["name"]}
@@ -381,7 +382,7 @@
     @ddt.data('show', 'reset', 'update')
     @decorators.idempotent_id('719768d1-d313-40e9-9127-c5777840ecbd')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_quotas_providing_share_type_and_user_id(self, op):
         share_type = self._create_share_type()
         kwargs = {"share_type": share_type["name"], "user_id": self.user_id}
@@ -397,7 +398,7 @@
     @ddt.data(11, -1)
     @decorators.idempotent_id('82256511-aa46-4b99-a6e5-8b400534e96d')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.39")
+    @utils.skip_if_microversion_lt("2.39")
     def test_update_share_type_quotas_bigger_than_project_quota(self, st_q):
         share_type = self._create_share_type()
         self.update_quotas(self.tenant_id, shares=10)
diff --git a/manila_tempest_tests/tests/api/admin/test_replication.py b/manila_tempest_tests/tests/api/admin/test_replication.py
index c435da1..6d5b40c 100644
--- a/manila_tempest_tests/tests/api/admin/test_replication.py
+++ b/manila_tempest_tests/tests/api/admin/test_replication.py
@@ -19,6 +19,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -89,7 +90,7 @@
                             LATEST_MICROVERSION]))
     def test_promote_out_of_sync_share_replica(self, version):
         """Test promote 'out_of_sync' share replica to active state."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         if (self.replication_type
                 not in constants.REPLICATION_PROMOTION_CHOICES):
             msg = "Option backend_replication_type should be one of (%s)!"
@@ -107,8 +108,9 @@
             share["id"], self.replica_zone, cleanup=False,
             client=self.admin_client, version=version)
         # Wait for replica state to update after creation
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # List replicas
@@ -124,9 +126,10 @@
         self.admin_client.reset_share_replica_state(
             replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC,
             version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC,
-            status_attr='replica_state')
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'],
+            constants.REPLICATION_STATE_OUT_OF_SYNC,
+            resource_name='share_replica', status_attr='replica_state')
 
         # Promote 'out_of_sync' replica to 'active' state.
         self.promote_share_replica(replica['id'], self.admin_client,
@@ -150,7 +153,7 @@
                             LATEST_MICROVERSION]))
     def test_force_delete_share_replica(self, version):
         """Test force deleting a replica that is in 'error_deleting' status."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         replica = self.create_share_replica(self.share['id'],
                                             self.replica_zone,
                                             cleanup_in_class=False,
@@ -158,8 +161,9 @@
                                             version=version)
         self.admin_client.reset_share_replica_status(
             replica['id'], constants.STATUS_ERROR_DELETING, version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.STATUS_ERROR_DELETING)
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'], constants.STATUS_ERROR_DELETING,
+            resource_name='share_replica')
         self.admin_client.force_delete_share_replica(replica['id'],
                                                      version=version)
         self.admin_client.wait_for_resource_deletion(replica_id=replica['id'])
@@ -172,7 +176,7 @@
                             LATEST_MICROVERSION]))
     def test_reset_share_replica_status(self, version):
         """Test resetting a replica's 'status' attribute."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         replica = self.create_share_replica(self.share['id'],
                                             self.replica_zone,
                                             cleanup_in_class=False,
@@ -181,8 +185,9 @@
         self.admin_client.reset_share_replica_status(replica['id'],
                                                      constants.STATUS_ERROR,
                                                      version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.STATUS_ERROR)
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'], constants.STATUS_ERROR,
+            resource_name='share_replica')
 
     @decorators.idempotent_id('258844da-a853-42b6-87db-b16e616018c6')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
@@ -192,7 +197,7 @@
                             LATEST_MICROVERSION]))
     def test_reset_share_replica_state(self, version):
         """Test resetting a replica's 'replica_state' attribute."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         replica = self.create_share_replica(self.share['id'],
                                             self.replica_zone,
                                             cleanup_in_class=False,
@@ -201,8 +206,9 @@
         self.admin_client.reset_share_replica_state(replica['id'],
                                                     constants.STATUS_ERROR,
                                                     version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.STATUS_ERROR, status_attr='replica_state')
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'], constants.STATUS_ERROR,
+            resource_name='share_replica', status_attr='replica_state')
 
     @decorators.idempotent_id('2969565a-85e8-4c61-9dfb-cc7f7ca9f6dd')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
@@ -212,26 +218,29 @@
                             LATEST_MICROVERSION]))
     def test_resync_share_replica(self, version):
         """Test resyncing a replica."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         replica = self.create_share_replica(self.share['id'],
                                             self.replica_zone,
                                             cleanup_in_class=False,
                                             client=self.admin_client,
                                             version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # Set replica_state to 'out_of_sync'.
         self.admin_client.reset_share_replica_state(
             replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC,
             version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC,
-            status_attr='replica_state')
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'],
+            constants.REPLICATION_STATE_OUT_OF_SYNC,
+            resource_name='share_replica', status_attr='replica_state')
 
         # Attempt resync
         self.admin_client.resync_share_replica(replica['id'], version=version)
-        self.admin_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.admin_client, replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
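
All of the replica waits above pass the same resource_name/status_attr pair to the shared waiter. Where that call shape needs repeating, it composes naturally into a small local helper; a hypothetical wrapper (not part of this change), using only arguments already visible in the hunks above:

    from manila_tempest_tests.common import constants
    from manila_tempest_tests.common import waiters


    def wait_for_replica_state(client, replica_id, state):
        # Thin wrapper over the shared waiter with the replica-specific
        # keyword arguments used throughout test_replication.py.
        waiters.wait_for_resource_status(
            client, replica_id, state,
            resource_name='share_replica', status_attr='replica_state')


    # Usage inside a test, e.g. after resetting a replica out of sync:
    # wait_for_replica_state(self.admin_client, replica['id'],
    #                        constants.REPLICATION_STATE_OUT_OF_SYNC)
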
diff --git a/manila_tempest_tests/tests/api/admin/test_replication_actions.py b/manila_tempest_tests/tests/api/admin/test_replication_actions.py
index 5ceb818..9a3af60 100644
--- a/manila_tempest_tests/tests/api/admin/test_replication_actions.py
+++ b/manila_tempest_tests/tests/api/admin/test_replication_actions.py
@@ -17,6 +17,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -85,8 +86,8 @@
         # Test extend share
         new_size = self.share["size"] + 1
         self.admin_client.extend_share(self.share["id"], new_size)
-        self.admin_client.wait_for_share_status(self.share["id"],
-                                                "available")
+        waiters.wait_for_resource_status(
+            self.admin_client, self.share["id"], "available")
         share = self.admin_client.get_share(self.share["id"])
         self.assertEqual(new_size, int(share["size"]))
 
@@ -98,7 +99,8 @@
         share = self.admin_client.get_share(self.share["id"])
         new_size = self.share["size"] - 1
         self.admin_client.shrink_share(self.share["id"], new_size)
-        self.admin_client.wait_for_share_status(share["id"], "available")
+        waiters.wait_for_resource_status(
+            self.admin_client, share["id"], "available")
         shrink_share = self.admin_client.get_share(self.share["id"])
         self.assertEqual(new_size, int(shrink_share["size"]))
 
@@ -127,8 +129,8 @@
         managed_share = self.admin_client.manage_share(
             share['host'], share['share_proto'],
             export_path, self.share_type_id)
-        self.admin_client.wait_for_share_status(
-            managed_share['id'], 'available')
+        waiters.wait_for_resource_status(
+            self.admin_client, managed_share['id'], 'available')
 
         # Add managed share to cleanup queue
         self.method_resources.insert(
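
The extend, shrink and manage paths above now all go through the one generic waiter instead of per-resource client methods. Conceptually that is a poll-until-predicate loop; the sketch below shows only the general pattern, with invented timeout and interval values, and is not the plugin's actual waiters implementation:

    import time


    def wait_until(fetch, is_done, timeout=300, interval=5):
        # Poll fetch() until is_done() accepts the result or time runs out.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if is_done(fetch()):
                return
            time.sleep(interval)
        raise AssertionError(
            'resource did not reach the expected state within %s seconds'
            % timeout)


    # e.g. waiting for a share to become available:
    # wait_until(lambda: client.get_share(share_id),
    #            lambda share: share['status'] == 'available')
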
diff --git a/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py b/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py
index 1d0b873..928f193 100644
--- a/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py
+++ b/manila_tempest_tests/tests/api/admin/test_scheduler_stats.py
@@ -19,6 +19,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -170,7 +171,7 @@
 
     @decorators.idempotent_id('f027fb62-1ec3-4f52-a782-e9dd9db34fda')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.23")
+    @utils.skip_if_microversion_not_supported("2.23")
     @ddt.data((True, "name"), (True, "id"), (False, "name"), (False, "id"))
     @ddt.unpack
     def test_pool_list_with_share_type_filter_with_detail(
@@ -196,7 +197,7 @@
 
     @decorators.idempotent_id('31cb2c99-3abf-4dce-8e66-7fd30b168300')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.23")
+    @utils.skip_if_microversion_not_supported("2.23")
     @ddt.data((True, "name"), (True, "id"), (False, "name"), (False, "id"))
     @ddt.unpack
     def test_pool_list_with_share_type_filter_with_detail_negative(
diff --git a/manila_tempest_tests/tests/api/admin/test_services_negative.py b/manila_tempest_tests/tests/api/admin/test_services_negative.py
index d74c609..e13618d 100644
--- a/manila_tempest_tests/tests/api/admin/test_services_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_services_negative.py
@@ -19,6 +19,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 
 @ddt.ddt
@@ -100,7 +101,7 @@
         ('services', '2.0'),
     )
     @ddt.unpack
-    @base.skip_if_microversion_not_supported("2.7")
+    @utils.skip_if_microversion_not_supported("2.7")
     def test_list_services_with_wrong_versions(self, url, version):
         self.assertRaises(
             lib_exc.NotFound,
diff --git a/manila_tempest_tests/tests/api/admin/test_share_group_types.py b/manila_tempest_tests/tests/api/admin/test_share_group_types.py
index eb62838..56d1881 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_group_types.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_group_types.py
@@ -60,12 +60,12 @@
     @decorators.idempotent_id('e2ba1754-cecc-4178-ad39-eefbb59e4d6d')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(
-        *itertools.product(('id', 'name'), set(
+        *itertools.product(('id', 'name'), utils.deduplicate(
             [LATEST_MICROVERSION, constants.MIN_SHARE_GROUP_MICROVERSION,
              constants.SHARE_GROUPS_GRADUATION_VERSION])))
     @ddt.unpack
     def test_create_get_delete_share_group_type(self, st_key, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
 
         # Create share group type
@@ -151,7 +151,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_update_single_share_group_type_spec(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
         group_specs = {'key1': 'value1', 'key2': 'value2'}
 
@@ -207,7 +207,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_delete_single_share_group_type_spec_min(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
         group_specs = {'key1': 'value1', 'key2': 'value2'}
 
@@ -237,7 +237,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_private_share_group_type_access(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
         group_specs = {"key1": "value1", "key2": "value2"}
         project_id = self.shares_v2_client.tenant_id
@@ -297,7 +297,7 @@
     @ddt.data(*utils.deduplicate(('2.45', '2.46', LATEST_MICROVERSION)))
     def test_share_group_type_create_show_list_with_is_default_key(self,
                                                                    version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
 
         # Create share group type
diff --git a/manila_tempest_tests/tests/api/admin/test_share_groups.py b/manila_tempest_tests/tests/api/admin/test_share_groups.py
index 207db8c..a2724b9 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_groups.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_groups.py
@@ -67,7 +67,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_create_share_group_with_single_share_type_min(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         share_group = self.create_share_group(
             share_group_type_id=self.sg_type_id,
             cleanup_in_class=False,
@@ -142,7 +142,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_default_share_group_type_applied(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         try:
             default_type = self.shares_v2_client.get_default_share_group_type(
                 version=version
diff --git a/manila_tempest_tests/tests/api/admin/test_share_instances.py b/manila_tempest_tests/tests/api/admin/test_share_instances.py
index 14cdbab..0833298 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_instances.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_instances.py
@@ -70,7 +70,7 @@
     @ddt.data('2.3', '2.9', '2.10', '2.30', '2.54')
     def test_get_share_instance(self, version):
         """Test that we get the proper keys back for the instance."""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         share_instances = self.shares_v2_client.get_instances_of_share(
             self.share['id'], version=version,
@@ -105,7 +105,7 @@
     @ddt.data('path', 'id')
     @decorators.idempotent_id('c27b415d-341c-42f0-a269-2c94f69fbee1')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.35")
+    @utils.skip_if_microversion_lt("2.35")
     def test_list_share_instances_with_export_location_path_and_id(
             self, export_location_type):
         share_instances_except = (
diff --git a/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py b/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py
index ebf8364..a01ffb2 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_instances_negative.py
@@ -15,6 +15,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 
 @ddt.ddt
@@ -31,7 +32,7 @@
 
     @decorators.idempotent_id('babe885e-a8ab-439d-8b95-e5422983a942')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.34")
+    @utils.skip_if_microversion_not_supported("2.34")
     @ddt.data('path', 'id')
     def test_list_share_instances_with_export_location_and_invalid_version(
             self, export_location_type):
@@ -48,7 +49,7 @@
 
     @decorators.idempotent_id('ce0d045c-e418-42fa-86e4-ead493fc0663')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.35")
+    @utils.skip_if_microversion_lt("2.35")
     @ddt.data('path', 'id')
     def test_list_share_instances_with_export_location_not_exist(
             self, export_location_type):
diff --git a/manila_tempest_tests/tests/api/admin/test_share_manage.py b/manila_tempest_tests/tests/api/admin/test_share_manage.py
index 09bbdb7..a8376c9 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_manage.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_manage.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -40,17 +41,14 @@
         super(ManageNFSShareTest, cls).skip_checks()
         if not CONF.share.run_manage_unmanage_tests:
             raise cls.skipException('Manage/unmanage tests are disabled.')
-
-    @classmethod
-    def resource_setup(cls):
         if cls.protocol not in CONF.share.enable_protocols:
             message = "%s tests are disabled" % cls.protocol
             raise cls.skipException(message)
-
         utils.skip_if_manage_not_supported_for_version()
 
+    @classmethod
+    def resource_setup(cls):
         super(ManageNFSShareTest, cls).resource_setup()
-
         # Create share type
         cls.st_name = data_utils.rand_name("manage-st-name")
         cls.extra_specs = {
@@ -110,8 +108,9 @@
                 'client': self.shares_client})
 
         # Wait for success
-        self.shares_v2_client.wait_for_share_status(managed_share['id'],
-                                                    constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(self.shares_v2_client,
+                                         managed_share['id'],
+                                         constants.STATUS_AVAILABLE)
 
         # Verify data of managed share
         self.assertEqual(name, managed_share['name'])
@@ -149,19 +148,19 @@
 
     @decorators.idempotent_id('15b654d0-34ed-4154-9f5f-b96d2e4e9d1c')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.5")
+    @utils.skip_if_microversion_not_supported("2.5")
     def test_manage_with_os_share_manage_url(self):
         self._test_manage(version="2.5")
 
     @decorators.idempotent_id('8c0beefb-19da-441e-b73f-d90eb8000ff3')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.8")
+    @utils.skip_if_microversion_not_supported("2.8")
     def test_manage_with_is_public_True(self):
         self._test_manage(is_public=True, version="2.8")
 
     @decorators.idempotent_id('da7b7a4f-6693-4460-bdb7-1f8d42032bc6')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.16")
+    @utils.skip_if_microversion_not_supported("2.16")
     def test_manage_show_user_id(self):
         self._test_manage(version="2.16")
 
diff --git a/manila_tempest_tests/tests/api/admin/test_share_manage_negative.py b/manila_tempest_tests/tests/api/admin/test_share_manage_negative.py
index cc97dd6..39b9557 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_manage_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_manage_negative.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -39,19 +40,17 @@
         super(ManageNFSShareNegativeTest, cls).skip_checks()
         if not CONF.share.run_manage_unmanage_tests:
             raise cls.skipException('Manage/unmanage tests are disabled.')
-
-    @classmethod
-    def resource_setup(cls):
         if cls.protocol not in CONF.share.enable_protocols:
             message = "%s tests are disabled" % cls.protocol
             raise cls.skipException(message)
 
         utils.skip_if_manage_not_supported_for_version()
 
+    @classmethod
+    def resource_setup(cls):
         super(ManageNFSShareNegativeTest, cls).resource_setup()
-
         # Create share type
-        cls.st_name = data_utils.rand_name("manage-st-name")
+        cls.st_name = data_utils.rand_name(name="manage-st-name")
         cls.extra_specs = {
             'storage_protocol': CONF.share.capability_storage_protocol,
             'driver_handles_share_servers': CONF.share.multitenancy_enabled,
@@ -68,8 +67,8 @@
         # Manage the share and wait for the expected state.
         # Return the managed share object.
         managed_share = self.shares_v2_client.manage_share(**params)
-        self.shares_v2_client.wait_for_share_status(
-            managed_share['id'], state)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, managed_share['id'], state)
 
         return managed_share
 
@@ -156,7 +155,8 @@
         self._unmanage_share_and_wait(share)
 
         for invalid_key, invalid_value in (
-            ('export_path', 'invalid_export'),
+            ('export_path',
+             data_utils.rand_name(name='invalid-share-export')),
             ('protocol', 'invalid_protocol'),
         ):
 
@@ -169,8 +169,9 @@
             invalid_share = self.shares_v2_client.manage_share(
                 **invalid_params
             )
-            self.shares_v2_client.wait_for_share_status(
-                invalid_share['id'], constants.STATUS_MANAGE_ERROR)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, invalid_share['id'],
+                constants.STATUS_MANAGE_ERROR)
 
             # cleanup
             self._unmanage_share_and_wait(invalid_share)
@@ -258,11 +259,14 @@
 
         # forge bad param to have a share in manage_error state
         invalid_params = valid_params.copy()
-        invalid_params.update({'export_path': 'invalid'})
+        invalid_params.update(
+            {'export_path': data_utils.rand_name(name='invalid-share-export')}
+        )
         invalid_share = self.shares_v2_client.manage_share(**invalid_params)
 
-        self.shares_v2_client.wait_for_share_status(
-            invalid_share['id'], constants.STATUS_MANAGE_ERROR)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, invalid_share['id'],
+            constants.STATUS_MANAGE_ERROR)
         self._unmanage_share_and_wait(share)
 
         # the attempt to delete a share in manage_error should raise an
diff --git a/manila_tempest_tests/tests/api/admin/test_share_networks.py b/manila_tempest_tests/tests/api/admin/test_share_networks.py
index 2ff9533..4bb71a2 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_networks.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_networks.py
@@ -43,6 +43,7 @@
         }
         cls.sn_with_ldap_ss = cls.create_share_network(
             cleanup_in_class=True,
+            add_security_services=False,
             **cls.data_sn_with_ldap_ss)
 
         cls.shares_client.add_sec_service_to_share_network(
@@ -68,7 +69,9 @@
 
         cls.sn_with_kerberos_ss = (
             cls.alt_shares_v2_client.create_share_network(
-                cleanup_in_class=True, **cls.data_sn_with_kerberos_ss)
+                cleanup_in_class=True,
+                add_security_services=False,
+                **cls.data_sn_with_kerberos_ss)
         )
 
         cls.alt_shares_v2_client.add_sec_service_to_share_network(
diff --git a/manila_tempest_tests/tests/api/admin/test_share_servers.py b/manila_tempest_tests/tests/api/admin/test_share_servers.py
index b5fe72c..d48c383 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_servers.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_servers.py
@@ -24,6 +24,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -328,9 +329,9 @@
                 share_server['id'],
                 status=state,
             )
-            self.shares_v2_client.wait_for_share_server_status(
-                share_server['id'],
-                status=state
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share_server['id'], state,
+                resource_name="share_server"
             )
 
         # bring the share server back in the active state
@@ -338,9 +339,9 @@
             share_server['id'],
             status=constants.SERVER_STATE_ACTIVE,
         )
-        self.shares_v2_client.wait_for_share_server_status(
-            share_server['id'],
-            status=constants.SERVER_STATE_ACTIVE
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_server['id'],
+            constants.SERVER_STATE_ACTIVE, resource_name="share_server"
         )
 
         # delete share
diff --git a/manila_tempest_tests/tests/api/admin/test_share_servers_manage_negative.py b/manila_tempest_tests/tests/api/admin/test_share_servers_manage_negative.py
index 0b04511..3b2db49 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_servers_manage_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_servers_manage_negative.py
@@ -22,6 +22,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -164,9 +165,9 @@
                 share['share_server_id'],
                 status=constants.SERVER_STATE_ACTIVE,
             )
-            self.shares_v2_client.wait_for_share_server_status(
-                share['share_server_id'],
-                constants.SERVER_STATE_ACTIVE,
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share['share_server_id'],
+                constants.SERVER_STATE_ACTIVE, resource_name='share_server'
             )
 
         # delete share
@@ -206,9 +207,9 @@
                 share['share_server_id'],
                 status=constants.SERVER_STATE_ACTIVE,
             )
-            self.shares_v2_client.wait_for_share_server_status(
-                share['share_server_id'],
-                constants.SERVER_STATE_ACTIVE,
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share['share_server_id'],
+                constants.SERVER_STATE_ACTIVE, resource_name='share_server'
             )
 
         # delete share
diff --git a/manila_tempest_tests/tests/api/admin/test_share_servers_migration.py b/manila_tempest_tests/tests/api/admin/test_share_servers_migration.py
index e162b17..ba774d3 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_servers_migration.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_servers_migration.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -96,8 +97,8 @@
                 access_to=rule.get('access_to'),
                 access_level=rule.get('access_level')
             )
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], constants.RULE_STATE_ACTIVE,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.RULE_STATE_ACTIVE,
             status_attr='access_rules_status')
 
         share = self.shares_v2_client.get_share(share['id'])
@@ -140,8 +141,10 @@
 
         # Check the snapshot status if possible.
         if snapshot_id:
-            self.shares_v2_client.wait_for_snapshot_status(
-                snapshot_id, constants.STATUS_AVAILABLE)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, snapshot_id, constants.STATUS_AVAILABLE,
+                resource_name='snapshot'
+            )
 
         # Check the share server destination status.
         dest_server = self.shares_v2_client.show_share_server(dest_server_id)
@@ -276,8 +279,12 @@
             src_server_id, dest_host, preserve_snapshots=preserve_snapshots)
 
         expected_state = constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE
-        self.shares_v2_client.wait_for_share_server_status(
-            src_server_id, expected_state, status_attr='task_state')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, src_server_id,
+            expected_state, resource_name='share_server',
+            status_attr='task_state'
+        )
+
         # Get the destination share server.
         dest_server_id = self._get_share_server_destination_for_migration(
             src_server_id)
@@ -295,8 +302,10 @@
 
         # Wait for the migration cancelled status.
         expected_state = constants.TASK_STATE_MIGRATION_CANCELLED
-        self.shares_v2_client.wait_for_share_server_status(
-            src_server_id, expected_state, status_attr='task_state')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, src_server_id,
+            expected_state, resource_name='share_server',
+            status_attr='task_state')
 
         # After the cancel operation, validate the resources again.
         expected_status = constants.STATUS_AVAILABLE
@@ -339,8 +348,11 @@
             preserve_snapshots=preserve_snapshots)
 
         expected_state = constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE
-        self.shares_v2_client.wait_for_share_server_status(
-            src_server_id, expected_state, status_attr='task_state')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, src_server_id,
+            expected_state, resource_name='share_server',
+            status_attr='task_state'
+        )
         # Get the destination share server.
         dest_server_id = self._get_share_server_destination_for_migration(
             src_server_id)
@@ -358,8 +370,10 @@
 
         # It's necessary to wait for the destination server to become active.
         expected_status = constants.SERVER_STATE_ACTIVE
-        self.shares_v2_client.wait_for_share_server_status(
-            dest_server_id, expected_status)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, dest_server_id, expected_status,
+            resource_name='share_server'
+        )
 
         # Check if the source server went to inactive status if it exists.
         try:
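
The server-migration flow above waits on the share server's task_state at a couple of points (phase-1 done, migration cancelled) and then on its status. A hypothetical helper capturing that repeated call shape, assembled only from arguments that appear in the hunks above:

    from manila_tempest_tests.common import waiters


    def wait_for_server_task_state(client, server_id, task_state):
        # Same keyword arguments as the share-server migration waits above.
        waiters.wait_for_resource_status(
            client, server_id, task_state,
            resource_name='share_server', status_attr='task_state')


    # e.g.:
    # wait_for_server_task_state(
    #     self.shares_v2_client, src_server_id,
    #     constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE)
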
diff --git a/manila_tempest_tests/tests/api/admin/test_share_servers_migration_negative.py b/manila_tempest_tests/tests/api/admin/test_share_servers_migration_negative.py
index 3b39a61..72ee2c9 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_servers_migration_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_servers_migration_negative.py
@@ -21,6 +21,7 @@
 
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api.admin import test_share_servers_migration
 from manila_tempest_tests.tests.api import base
 
@@ -245,11 +246,13 @@
     def resource_cleanup(cls):
         states = [constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS,
                   constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE]
-        cls.shares_v2_client.wait_for_share_server_status(
-            cls.server_id, status=states, status_attr="task_state")
+        waiters.wait_for_resource_status(
+            cls.shares_v2_client, cls.server_id, states,
+            resource_name="share_server",
+            status_attr="task_state")
         cls.shares_v2_client.share_server_migration_cancel(cls.server_id)
-        cls.shares_v2_client.wait_for_share_status(cls.share['id'],
-                                                   status="available")
+        waiters.wait_for_resource_status(
+            cls.shares_v2_client, cls.share['id'], status="available")
         super(ShareServerMigrationStartNegativesNFS, cls).resource_cleanup()
 
     @decorators.idempotent_id('5b904db3-fc36-4c35-a8ef-cf6b80315388')
diff --git a/manila_tempest_tests/tests/api/admin/test_share_snapshot_instances.py b/manila_tempest_tests/tests/api/admin/test_share_snapshot_instances.py
index d09b25f..0aec375 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_snapshot_instances.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_snapshot_instances.py
@@ -18,6 +18,7 @@
 from tempest.lib import decorators
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -128,8 +129,9 @@
         for status in ("error", "available"):
             self.shares_v2_client.reset_snapshot_instance_status(
                 sii, status=status)
-            self.shares_v2_client.wait_for_snapshot_instance_status(
-                sii, expected_status=status)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, sii, status,
+                resource_name='snapshot_instance')
         self.shares_v2_client.delete_snapshot(snapshot['id'])
         self.shares_v2_client.wait_for_resource_deletion(
             snapshot_id=snapshot['id'])
diff --git a/manila_tempest_tests/tests/api/admin/test_share_types.py b/manila_tempest_tests/tests/api/admin/test_share_types.py
index cb97bc5..560a41e 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_types.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_types.py
@@ -20,6 +20,7 @@
 from tempest.lib import exceptions as lib_exc
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -72,7 +73,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data('2.0', '2.6', '2.7', '2.40', '2.41')
     def test_share_type_create_get(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         name = data_utils.rand_name("tempest-manila")
         description = None
@@ -101,7 +102,7 @@
         # Check that backwards compatibility didn't break
         self.assertDictMatch(get["volume_type"], get["share_type"])
 
-    @base.skip_if_microversion_lt("2.50")
+    @utils.skip_if_microversion_lt("2.50")
     @decorators.idempotent_id('a9af19e1-e789-4c4f-a39b-dd8df6ed00b1')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(
@@ -150,7 +151,7 @@
                 st_is_public,
                 updated_st["share_type"]["share_type_access:is_public"])
 
-    @base.skip_if_microversion_lt("2.50")
+    @utils.skip_if_microversion_lt("2.50")
     @decorators.idempotent_id('9019dc61-b2b1-472d-9b15-a3986439d4c3')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(
@@ -185,7 +186,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data('2.0', '2.6', '2.7', '2.40', '2.41')
     def test_share_type_create_list(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         name = data_utils.rand_name("tempest-manila")
         description = None
@@ -233,7 +234,8 @@
         share = self.create_share(
             name=share_name, share_type_id=st_create["share_type"]["id"])
         self.assertEqual(share["name"], share_name)
-        self.shares_client.wait_for_share_status(share["id"], "available")
+        waiters.wait_for_resource_status(
+            self.shares_client, share["id"], "available")
 
         # Verify share info
         get = self.shares_v2_client.get_share(share["id"], version="2.5")
@@ -298,7 +300,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(*utils.deduplicate(('2.45', '2.46', LATEST_MICROVERSION)))
     def test_share_type_create_show_list_with_is_default_key(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         name = data_utils.rand_name("tempest-manila")
         extra_specs = self.add_extra_specs_to_dict()
 
diff --git a/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs.py b/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs.py
index ec65ab9..7d94157 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs.py
@@ -128,7 +128,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(*utils.deduplicate(['2.24', LATEST_MICROVERSION]))
     def test_delete_snapshot_support_extra_spec(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         # Delete one extra spec for share type
         self.shares_v2_client.delete_share_type_extra_spec(
             self.st_id, 'snapshot_support', version=version)
diff --git a/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs_negative.py b/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs_negative.py
index 8d2893c..9a94edf 100644
--- a/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_share_types_extra_specs_negative.py
@@ -336,7 +336,7 @@
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
     @ddt.data('2.0', '2.23')
     def test_try_delete_required_spec_snapshot_support_version(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         st = self._create_share_type()
         # Try delete extra spec 'snapshot_support'
         self.assertRaises(
diff --git a/manila_tempest_tests/tests/api/admin/test_shares_actions.py b/manila_tempest_tests/tests/api/admin/test_shares_actions.py
index d6b9d22..e7a5c4d 100644
--- a/manila_tempest_tests/tests/api/admin/test_shares_actions.py
+++ b/manila_tempest_tests/tests/api/admin/test_shares_actions.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -242,7 +243,7 @@
         for share in shares:
             self.assertEqual(filters['host'], share['host'])
 
-    @base.skip_if_microversion_lt("2.35")
+    @utils.skip_if_microversion_lt("2.35")
     @ddt.data(('path', True), ('id', True), ('path', False), ('id', False))
     @ddt.unpack
     @decorators.idempotent_id('a27e5e3f-451f-4200-af38-99a562ccbe86')
diff --git a/manila_tempest_tests/tests/api/admin/test_snapshot_manage.py b/manila_tempest_tests/tests/api/admin/test_snapshot_manage.py
index 8e97887..15bd0f6 100644
--- a/manila_tempest_tests/tests/api/admin/test_snapshot_manage.py
+++ b/manila_tempest_tests/tests/api/admin/test_snapshot_manage.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -99,9 +100,9 @@
                 'client': self.shares_v2_client})
 
         # Wait for success
-        self.shares_v2_client.wait_for_snapshot_status(
-            snapshot['id'],
-            constants.STATUS_AVAILABLE
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, snapshot['id'], constants.STATUS_AVAILABLE,
+            resource_name='snapshot'
         )
 
         # Verify manage snapshot API response
@@ -144,7 +145,7 @@
         utils.skip_if_manage_not_supported_for_version(version)
 
         # Skip in case specified version is not supported
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         snap_name = data_utils.rand_name("tempest-snapshot-name")
         snap_desc = data_utils.rand_name("tempest-snapshot-description")
diff --git a/manila_tempest_tests/tests/api/admin/test_snapshot_manage_negative.py b/manila_tempest_tests/tests/api/admin/test_snapshot_manage_negative.py
index 68ed242..9d6c8e5 100644
--- a/manila_tempest_tests/tests/api/admin/test_snapshot_manage_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_snapshot_manage_negative.py
@@ -22,6 +22,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -32,7 +33,7 @@
     protocol = 'nfs'
 
     @classmethod
-    @base.skip_if_microversion_lt("2.12")
+    @utils.skip_if_microversion_lt("2.12")
     @testtools.skipUnless(
         CONF.share.run_manage_unmanage_snapshot_tests,
         "Manage/unmanage snapshot tests are disabled.")
@@ -120,9 +121,9 @@
             'invalid_provider_location',
             driver_options={}
         )
-        self.shares_v2_client.wait_for_snapshot_status(
-            invalid_snap['id'],
-            constants.STATUS_MANAGE_ERROR
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, invalid_snap['id'],
+            constants.STATUS_MANAGE_ERROR, resource_name='snapshot'
         )
         self.shares_v2_client.unmanage_snapshot(invalid_snap['id'])
 
@@ -131,9 +132,9 @@
             self.share['id'],
             snap['provider_location']
         )
-        self.shares_v2_client.wait_for_snapshot_status(
-            managed_snap['id'],
-            constants.STATUS_AVAILABLE
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, managed_snap['id'],
+            constants.STATUS_AVAILABLE, resource_name='snapshot'
         )
         self._delete_snapshot_and_wait(managed_snap)
 
diff --git a/manila_tempest_tests/tests/api/admin/test_user_messages.py b/manila_tempest_tests/tests/api/admin/test_user_messages.py
index 80c992c..81ccd8a 100644
--- a/manila_tempest_tests/tests/api/admin/test_user_messages.py
+++ b/manila_tempest_tests/tests/api/admin/test_user_messages.py
@@ -121,7 +121,7 @@
             message_id=self.message['id'])
 
     @decorators.attr(type=[base.TAG_POSITIVE, base.TAG_API])
-    @base.skip_if_microversion_not_supported(QUERY_BY_TIMESTAMP_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(QUERY_BY_TIMESTAMP_MICROVERSION)
     @decorators.idempotent_id('2ed0c40e-cdaa-471b-97d4-5ebe3fb040e9')
     def test_list_messages_with_since_and_before_filters(self):
         new_message = self.create_user_message()
diff --git a/manila_tempest_tests/tests/api/admin/test_user_messages_negative.py b/manila_tempest_tests/tests/api/admin/test_user_messages_negative.py
index d8fcd5d..a1ad386 100644
--- a/manila_tempest_tests/tests/api/admin/test_user_messages_negative.py
+++ b/manila_tempest_tests/tests/api/admin/test_user_messages_negative.py
@@ -64,7 +64,7 @@
                           six.text_type(uuidutils.generate_uuid()))
 
     @decorators.attr(type=[base.TAG_NEGATIVE, base.TAG_API])
-    @base.skip_if_microversion_not_supported(QUERY_BY_TIMESTAMP_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(QUERY_BY_TIMESTAMP_MICROVERSION)
     @decorators.idempotent_id('03e80563-1a36-408e-baa8-0e3ed46f7a0a')
     def test_list_messages_with_invalid_time_format(self):
         params_key = ['created_since', 'created_before']
diff --git a/manila_tempest_tests/tests/api/base.py b/manila_tempest_tests/tests/api/base.py
index fe4616e..dbcb1ff 100755
--- a/manila_tempest_tests/tests/api/base.py
+++ b/manila_tempest_tests/tests/api/base.py
@@ -27,6 +27,7 @@
 
 from manila_tempest_tests import clients
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests import utils
 
@@ -114,18 +115,6 @@
     # a stable class is used, which includes plugin registered services as well
     client_manager = clients.Clients
 
-    def skip_if_microversion_not_supported(self, microversion):
-        if not utils.is_microversion_supported(microversion):
-            raise self.skipException(
-                "Microversion '%s' is not supported." % microversion)
-
-    def skip_if_microversion_lt(self, microversion):
-        if utils.is_microversion_lt(CONF.share.max_api_microversion,
-                                    microversion):
-            raise self.skipException(
-                "Microversion must be greater than or equal to '%s'." %
-                microversion)
-
     @classmethod
     def skip_checks(cls):
         super(BaseSharesTest, cls).skip_checks()
@@ -399,24 +388,24 @@
             nondisruptive=nondisruptive, preserve_snapshots=preserve_snapshots,
             new_share_network_id=new_share_network_id,
             new_share_type_id=new_share_type_id, **kwargs)
-        share = client.wait_for_migration_status(
-            share_id, dest_host, wait_for_status, **kwargs)
+        share = waiters.wait_for_migration_status(
+            client, share_id, dest_host, wait_for_status, **kwargs)
         return share
 
     @classmethod
     def migration_complete(cls, share_id, dest_host, client=None, **kwargs):
         client = client or cls.shares_v2_client
         client.migration_complete(share_id, **kwargs)
-        share = client.wait_for_migration_status(
-            share_id, dest_host, 'migration_success', **kwargs)
+        share = waiters.wait_for_migration_status(
+            client, share_id, dest_host, 'migration_success', **kwargs)
         return share
 
     @classmethod
     def migration_cancel(cls, share_id, dest_host, client=None, **kwargs):
         client = client or cls.shares_v2_client
         client.migration_cancel(share_id, **kwargs)
-        share = client.wait_for_migration_status(
-            share_id, dest_host, 'migration_cancelled', **kwargs)
+        share = waiters.wait_for_migration_status(
+            client, share_id, dest_host, 'migration_cancelled', **kwargs)
         return share
 
     @classmethod
@@ -480,7 +469,8 @@
                 client = d["kwargs"]["client"]
                 share_id = d["share"]["id"]
                 try:
-                    client.wait_for_share_status(share_id, "available")
+                    waiters.wait_for_resource_status(
+                        client, share_id, "available")
                     d["available"] = True
                 except (share_exceptions.ShareBuildErrorException,
                         exceptions.TimeoutException) as e:
@@ -539,7 +529,9 @@
                 else:
                     cls.method_resources.insert(0, resource)
 
-        client.wait_for_share_group_status(share_group['id'], 'available')
+        waiters.wait_for_resource_status(
+            client, share_group['id'], 'available',
+            resource_name='share_group')
         return share_group
 
     @classmethod
@@ -588,7 +580,8 @@
             cls.class_resources.insert(0, resource)
         else:
             cls.method_resources.insert(0, resource)
-        client.wait_for_snapshot_status(snapshot["id"], "available")
+        waiters.wait_for_resource_status(client, snapshot["id"], "available",
+                                         resource_name='snapshot')
         return snapshot
 
     @classmethod
@@ -609,8 +602,9 @@
             cls.class_resources.insert(0, resource)
         else:
             cls.method_resources.insert(0, resource)
-        client.wait_for_share_group_snapshot_status(
-            sg_snapshot["id"], "available")
+        waiters.wait_for_resource_status(
+            client, sg_snapshot["id"], "available",
+            resource_name="share_group_snapshot")
         return sg_snapshot
 
     @classmethod
@@ -696,8 +690,9 @@
                 cls.class_resources.insert(0, resource)
             else:
                 cls.method_resources.insert(0, resource)
-        client.wait_for_share_replica_status(
-            replica["id"], constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            client, replica["id"], constants.STATUS_AVAILABLE,
+            resource_name='share_replica')
         return replica
 
     @classmethod
@@ -715,10 +710,9 @@
                               version=CONF.share.max_api_microversion):
         client = client or cls.shares_v2_client
         replica = client.promote_share_replica(replica_id, version=version)
-        client.wait_for_share_replica_status(
-            replica["id"],
-            constants.REPLICATION_STATE_ACTIVE,
-            status_attr="replica_state")
+        waiters.wait_for_resource_status(
+            client, replica["id"], constants.REPLICATION_STATE_ACTIVE,
+            resource_name='share_replica', status_attr="replica_state")
         return replica
 
     @classmethod
@@ -1084,8 +1078,9 @@
                   'share_network_id': self.shares_v2_client.share_network_id}
         share = self.shares_v2_client.create_share(**params)
         self.addCleanup(self.shares_v2_client.delete_share, share['id'])
-        self.shares_v2_client.wait_for_share_status(share['id'], "error")
-        return self.shares_v2_client.wait_for_message(share['id'])
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], "error")
+        return waiters.wait_for_message(self.shares_v2_client, share['id'])
 
     def allow_access(self, share_id, client=None, access_type=None,
                      access_level='rw', access_to=None, status='active',
@@ -1098,8 +1093,10 @@
 
         rule = client.create_access_rule(share_id, access_type, access_to,
                                          access_level)
-        client.wait_for_access_rule_status(share_id, rule['id'], status,
-                                           raise_rule_in_error_state)
+        waiters.wait_for_resource_status(
+            client, share_id, status, resource_name='access_rule',
+            rule_id=rule['id'],
+            raise_rule_in_error_state=raise_rule_in_error_state)
         if cleanup:
             self.addCleanup(client.wait_for_resource_deletion,
                             rule_id=rule['id'], share_id=share_id)
@@ -1107,11 +1104,6 @@
         return rule
 
 
-class BaseSharesAltTest(BaseSharesTest):
-    """Base test case class for all Shares Alt API tests."""
-    credentials = ('alt', )
-
-
 class BaseSharesAdminTest(BaseSharesTest):
     """Base test case class for all Shares Admin API tests."""
     credentials = ('admin', )
@@ -1186,8 +1178,9 @@
             description=description,
             share_server_id=share_server_id
         )
-        self.shares_v2_client.wait_for_share_status(
-            managed_share['id'], constants.STATUS_AVAILABLE
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, managed_share['id'],
+            constants.STATUS_AVAILABLE
         )
 
         return managed_share
@@ -1207,9 +1200,9 @@
             params.get('identifier', share_server['identifier']),
             share_network_subnet_id=subnet_id,
         )
-        self.shares_v2_client.wait_for_share_server_status(
-            managed_share_server['id'],
-            constants.SERVER_STATE_ACTIVE,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, managed_share_server['id'],
+            constants.SERVER_STATE_ACTIVE, resource_name='share_server'
         )
 
         return managed_share_server
@@ -1263,7 +1256,8 @@
         cls.os_admin.domains_client = (
             cls.os_admin.identity_v3.DomainsClient() if
             CONF.identity.auth_version == 'v3' else None)
-        cls.admin_project_member_client = cls.create_user_and_get_client()
+        cls.admin_project_member_client = cls.create_user_and_get_client(
+            project=cls.admin_project, add_member_role=True)
 
         if CONF.share.multitenancy_enabled:
             admin_share_network_id = cls.provide_share_network(
@@ -1278,7 +1272,7 @@
             cls.alt_shares_v2_client.share_network_id = alt_share_network_id
 
     @classmethod
-    def create_user_and_get_client(cls, project=None):
+    def create_user_and_get_client(cls, project=None, add_member_role=True):
         """Create a user in specified project & set share clients for user
 
         The user will have all roles specified in tempest.conf
@@ -1303,9 +1297,12 @@
             username, password, project, email)
         cls.class_project_users_created.append(user)
 
-        for conf_role in CONF.auth.tempest_roles:
-            cls.os_admin.creds_client.assign_user_role(
-                user, project, conf_role)
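+        # work on a copy so the global CONF list is not mutated, and make
+        # sure the user also gets the 'member' role, which regular project
+        # operations typically require under the newer RBAC defaults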
+        tempest_roles_to_assign = list(CONF.auth.tempest_roles or [])
+        if "member" not in tempest_roles_to_assign and add_member_role:
+            tempest_roles_to_assign.append("member")
+
+        for role in tempest_roles_to_assign:
+            cls.os_admin.creds_client.assign_user_role(user, project, role)
 
         user_creds = cls.os_admin.creds_client.get_credentials(
             user, project, password)
diff --git a/manila_tempest_tests/tests/api/test_access_rules_metadata.py b/manila_tempest_tests/tests/api/test_access_rules_metadata.py
index bb6f7cd..9913fc8 100644
--- a/manila_tempest_tests/tests/api/test_access_rules_metadata.py
+++ b/manila_tempest_tests/tests/api/test_access_rules_metadata.py
@@ -87,7 +87,7 @@
             self.access_to[self.access_type].pop(), 'rw', metadata=data)
 
         # read metadata
-        get_access = self.shares_v2_client.get_access(access["id"])
+        get_access = self.shares_v2_client.get_access_rule(access["id"])
 
         # verify metadata
         self.assertEqual(data, get_access['metadata'])
@@ -97,7 +97,7 @@
             self.shares_v2_client.delete_access_metadata(access["id"], key)
 
         # verify deletion of metadata
-        access_without_md = self.shares_v2_client.get_access(access["id"])
+        access_without_md = self.shares_v2_client.get_access_rule(access["id"])
         self.assertEqual({}, access_without_md['metadata'])
         self.shares_v2_client.delete_access_rule(self.share["id"],
                                                  access["id"])
@@ -113,7 +113,7 @@
         self.shares_v2_client.update_access_metadata(
             access_id=self.access['id'], metadata=md2)
         # get metadata
-        get_access = self.shares_v2_client.get_access(self.access['id'])
+        get_access = self.shares_v2_client.get_access_rule(self.access['id'])
 
         # verify metadata
         self.md1.update(md2)
diff --git a/manila_tempest_tests/tests/api/test_availability_zones.py b/manila_tempest_tests/tests/api/test_availability_zones.py
index 842d9e1..e344ab3 100644
--- a/manila_tempest_tests/tests/api/test_availability_zones.py
+++ b/manila_tempest_tests/tests/api/test_availability_zones.py
@@ -17,6 +17,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 
 class AvailabilityZonesTest(base.BaseSharesTest):
@@ -39,7 +40,7 @@
 
     @decorators.idempotent_id('7054f2f4-bc77-4d60-82a6-2f23b93d281e')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported("2.6")
+    @utils.skip_if_microversion_not_supported("2.6")
     def test_list_availability_zones_legacy_url_api_v2(self):
         # NOTE(vponomaryov): remove this test with removal of availability zone
         # extension url support.
@@ -49,7 +50,7 @@
 
     @decorators.idempotent_id('4caadb86-2988-4adb-b705-aece99235c1e')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported("2.7")
+    @utils.skip_if_microversion_not_supported("2.7")
     def test_list_availability_zones(self):
         azs = self.shares_v2_client.list_availability_zones(version='2.7')
         self._list_availability_zones_assertions(azs)
diff --git a/manila_tempest_tests/tests/api/test_public_shares.py b/manila_tempest_tests/tests/api/test_public_shares.py
new file mode 100644
index 0000000..6b0ef69
--- /dev/null
+++ b/manila_tempest_tests/tests/api/test_public_shares.py
@@ -0,0 +1,106 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from tempest import config
+from tempest.lib.common.utils import data_utils
+from tempest.lib import decorators
+from testtools import testcase as tc
+
+from manila_tempest_tests.tests.api import base
+
+CONF = config.CONF
+LATEST_MICROVERSION = CONF.share.max_api_microversion
+
+
+class PublicSharesTest(base.BaseSharesMixedTest):
+
+    @classmethod
+    def resource_setup(cls):
+        super(PublicSharesTest, cls).resource_setup()
+        # create share_type
+        share_type = cls._create_share_type()
+        cls.share_type_id = share_type['id']
+
+    @decorators.idempotent_id('557a0474-9e30-47b4-a766-19e2afb13e66')
+    @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
+    def test_list_shares_public_with_detail(self):
+        # The default RBAC policy in manila only allows admin users with
+        # system scope to create public shares since the Stein release
+        public_share = self.create_share(
+            name='public_share - must be visible to all projects in the cloud',
+            description='public_share_desc',
+            share_type_id=self.share_type_id,
+            is_public=True,
+            cleanup_in_class=False,
+            client=self.admin_shares_v2_client,
+            version=LATEST_MICROVERSION
+        )
+        private_share = self.create_share(
+            name='private_share',
+            description='private share in the primary user project',
+            share_type_id=self.share_type_id,
+            is_public=False,
+            cleanup_in_class=False,
+            version=LATEST_MICROVERSION
+        )
+
+        params = {'is_public': True}
+        shares = self.alt_shares_v2_client.list_shares_with_detail(params)
+
+        keys = [
+            'status', 'description', 'links', 'availability_zone',
+            'created_at', 'share_proto', 'name', 'snapshot_id', 'id',
+            'size', 'project_id', 'is_public',
+        ]
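+        # every share in the listing should expose all of the expected keys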
+        [self.assertIn(key, sh.keys()) for sh in shares for key in keys]
+
+        retrieved_public_share = [
+            share for share in shares if share['id'] == public_share['id']
+        ]
+        msg = ('expected the public share to appear exactly once in the '
+               'listing, but it appeared %s times' %
+               len(retrieved_public_share))
+        self.assertEqual(1, len(retrieved_public_share), msg)
+        self.assertTrue(retrieved_public_share[0]['is_public'])
+
+        self.assertFalse(any([s['id'] == private_share['id'] for s in shares]))
+
+    @decorators.idempotent_id('e073182e-459d-4e08-9300-5bc964ca806b')
+    @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
+    def test_update_share_set_is_public(self):
+        share_name = data_utils.rand_name('tempest-share-name')
+        share = self.create_share(name=share_name,
+                                  description='a share we will update',
+                                  share_type_id=self.share_type_id,
+                                  is_public=False,
+                                  cleanup_in_class=False,
+                                  version=LATEST_MICROVERSION)
+
+        share = self.shares_v2_client.get_share(share['id'])
+        self.assertEqual(share_name, share['name'])
+        self.assertEqual('a share we will update', share['description'])
+        self.assertFalse(share['is_public'])
+
+        # update share, manila's default RBAC only allows administrator
+        # users with a system scope token to update a private share to public
+        new_name = data_utils.rand_name('tempest-new-share-name')
+        new_desc = 'share is now updated'
+        updated = self.admin_shares_v2_client.update_share(
+            share['id'], name=new_name, desc=new_desc, is_public=True)
+        self.assertEqual(new_name, updated['name'])
+        self.assertEqual(new_desc, updated['description'])
+        self.assertTrue(updated['is_public'])
+
+        # this share must now be publicly accessible
+        share = self.alt_shares_v2_client.get_share(share['id'])
+        self.assertEqual(new_name, share['name'])
+        self.assertEqual(new_desc, share['description'])
+        self.assertTrue(share['is_public'])
diff --git a/manila_tempest_tests/tests/api/test_public_shares_negative.py b/manila_tempest_tests/tests/api/test_public_shares_negative.py
new file mode 100644
index 0000000..6d99f13
--- /dev/null
+++ b/manila_tempest_tests/tests/api/test_public_shares_negative.py
@@ -0,0 +1,84 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+from tempest.lib import decorators
+from tempest.lib import exceptions as lib_exc
+from testtools import testcase as tc
+
+from manila_tempest_tests.tests.api import base
+
+
+class PublicSharesNegativeTest(base.BaseSharesMixedTest):
+
+    @classmethod
+    def resource_setup(cls):
+        super(PublicSharesNegativeTest, cls).resource_setup()
+        # create share_type
+        share_type = cls._create_share_type()
+        share_type_id = share_type['id']
+        # create a public share - manila's default RBAC only allows
+        # administrator users operating at system scope to create public shares
+        cls.share = cls.create_share(
+            name='public_share',
+            description='public_share_desc',
+            share_type_id=share_type_id,
+            is_public=True,
+            metadata={'key': 'value'},
+            client=cls.admin_shares_v2_client
+        )
+
+    @decorators.idempotent_id('255011c0-4ed9-4174-bb13-8bbd06a62529')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_update_share_with_wrong_public_value(self):
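+        # 'truebar' is not a valid boolean, so the API should reject it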
+        self.assertRaises(lib_exc.BadRequest,
+                          self.admin_shares_v2_client.update_share,
+                          self.share["id"],
+                          is_public="truebar")
+
+    @decorators.idempotent_id('3443493b-f56a-4faa-9968-e7cbb0d2802f')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_update_other_tenants_public_share(self):
+        self.assertRaises(lib_exc.Forbidden,
+                          self.alt_shares_v2_client.update_share,
+                          self.share["id"],
+                          name="new_name")
+
+    @decorators.idempotent_id('68d1f1bc-16e4-4086-8982-7e44ca6bdc4d')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_delete_other_tenants_public_share(self):
+        self.assertRaises(lib_exc.Forbidden,
+                          self.alt_shares_v2_client.delete_share,
+                          self.share['id'])
+
+    @decorators.idempotent_id('1f9e5d84-0885-4a4b-9196-9031a1c01508')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_set_metadata_of_other_tenants_public_share(self):
+        self.assertRaises(lib_exc.Forbidden,
+                          self.alt_shares_v2_client.set_metadata,
+                          self.share['id'],
+                          {'key': 'value'})
+
+    @decorators.idempotent_id('fed7a935-9699-43a1-854e-67b61ba6233e')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_update_metadata_of_other_tenants_public_share(self):
+        self.assertRaises(lib_exc.Forbidden,
+                          self.alt_shares_v2_client.update_all_metadata,
+                          self.share['id'],
+                          {'key': 'value'})
+
+    @decorators.idempotent_id('bd62adeb-73c2-4b04-8812-80b479cd5c3b')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_delete_metadata_of_other_tenants_public_share(self):
+        self.assertRaises(lib_exc.Forbidden,
+                          self.alt_shares_v2_client.delete_metadata,
+                          self.share['id'],
+                          'key')
diff --git a/manila_tempest_tests/tests/api/test_quotas.py b/manila_tempest_tests/tests/api/test_quotas.py
index 958cb63..1052cd5 100644
--- a/manila_tempest_tests/tests/api/test_quotas.py
+++ b/manila_tempest_tests/tests/api/test_quotas.py
@@ -33,10 +33,14 @@
 class SharesQuotasTest(base.BaseSharesTest):
 
     @classmethod
-    def resource_setup(cls):
+    def skip_checks(cls):
+        super(SharesQuotasTest, cls).skip_checks()
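+        # skip before any resources are provisioned when quota tests are off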
         if not CONF.share.run_quota_tests:
             msg = "Quota tests are disabled."
             raise cls.skipException(msg)
+
+    @classmethod
+    def resource_setup(cls):
         super(SharesQuotasTest, cls).resource_setup()
         cls.user_id = cls.shares_v2_client.user_id
         cls.tenant_id = cls.shares_v2_client.tenant_id
@@ -88,14 +92,14 @@
             self.assertGreater(int(quotas["replica_gigabytes"]), -2)
 
     @ddt.data(
-        *itertools.product(set(
+        *itertools.product(utils.deduplicate(
             ["2.25", "2.53", CONF.share.max_api_microversion]), (True, False))
     )
     @ddt.unpack
     @decorators.idempotent_id('795614f6-4a18-47d5-b817-0b294e9d4c48')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     def test_show_quotas_detail(self, microversion, with_user):
-        self.skip_if_microversion_not_supported(microversion)
+        utils.skip_if_microversion_not_supported(microversion)
         quota_args = {"tenant_id": self.tenant_id, "version": microversion, }
         keys = ['gigabytes', 'snapshot_gigabytes', 'shares',
                 'snapshots', 'share_networks']
@@ -115,7 +119,7 @@
 
     @decorators.idempotent_id('7bd5ac42-9fcb-477f-a253-02cde2bde661')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_not_supported(PRE_SHARE_REPLICAS_MICROVERSION)
+    @utils.skip_if_microversion_not_supported(PRE_SHARE_REPLICAS_MICROVERSION)
     def test_quota_detail_2_52_no_share_replica_quotas(self):
         quota_args = {"tenant_id": self.tenant_id,
                       "version": PRE_SHARE_REPLICAS_MICROVERSION}
diff --git a/manila_tempest_tests/tests/api/test_quotas_negative.py b/manila_tempest_tests/tests/api/test_quotas_negative.py
index 6b6f1b2..42c9ea7 100644
--- a/manila_tempest_tests/tests/api/test_quotas_negative.py
+++ b/manila_tempest_tests/tests/api/test_quotas_negative.py
@@ -28,11 +28,11 @@
 class SharesQuotasNegativeTest(base.BaseSharesTest):
 
     @classmethod
-    def resource_setup(cls):
+    def skip_checks(cls):
+        super(SharesQuotasNegativeTest, cls).skip_checks()
         if not CONF.share.run_quota_tests:
             msg = "Quota tests are disabled."
             raise cls.skipException(msg)
-        super(SharesQuotasNegativeTest, cls).resource_setup()
 
     @decorators.idempotent_id('d0dfe81d-8e8c-4847-a55f-95ba8a3d922c')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
diff --git a/manila_tempest_tests/tests/api/test_replication.py b/manila_tempest_tests/tests/api/test_replication.py
index 5eb5e2a..881262f 100644
--- a/manila_tempest_tests/tests/api/test_replication.py
+++ b/manila_tempest_tests/tests/api/test_replication.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -129,8 +130,9 @@
         replica = self.create_share_replica(share["id"], self.replica_zone,
                                             cleanup=False)
         # Wait for replica state to update after creation
-        self.shares_v2_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
         # Promote the first in_sync replica to active state
         promoted_replica = self.promote_share_replica(replica['id'])
@@ -165,7 +167,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     @testtools.skipIf(
         not CONF.share.multitenancy_enabled, "Only for multitenancy.")
-    @base.skip_if_microversion_not_supported("2.51")
+    @utils.skip_if_microversion_not_supported("2.51")
     def test_add_delete_share_replica_different_subnet(self):
         # Create new subnet in replica az
         subnet = utils.share_network_get_default_subnet(self.share_network)
@@ -191,16 +193,18 @@
         access_type, access_to = self._get_access_rule_data_from_config()
         rule = self.shares_v2_client.create_access_rule(
             self.shares[0]["id"], access_type, access_to, 'ro')
-        self.shares_v2_client.wait_for_access_rule_status(
-            self.shares[0]["id"], rule["id"], constants.RULE_STATE_ACTIVE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.shares[0]["id"],
+            constants.RULE_STATE_ACTIVE, resource_name='access_rule',
+            rule_id=rule["id"])
 
         # Create the replica
         self._verify_create_replica()
 
         # Verify access_rules_status transitions to 'active' state.
-        self.shares_v2_client.wait_for_share_status(
-            self.shares[0]["id"], constants.RULE_STATE_ACTIVE,
-            status_attr='access_rules_status')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.shares[0]["id"],
+            constants.RULE_STATE_ACTIVE, status_attr='access_rules_status')
 
         # Delete rule and wait for deletion
         self.shares_v2_client.delete_access_rule(self.shares[0]["id"],
@@ -219,9 +223,9 @@
         self.shares_v2_client.create_access_rule(
             self.shares[0]["id"], access_type, access_to, 'ro')
 
-        self.shares_v2_client.wait_for_share_status(
-            self.shares[0]["id"], constants.RULE_STATE_ACTIVE,
-            status_attr='access_rules_status')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.shares[0]["id"],
+            constants.RULE_STATE_ACTIVE, status_attr='access_rules_status')
 
         # Delete the replica
         self.delete_share_replica(share_replica["id"])
@@ -279,8 +283,9 @@
         access_type, access_to = self._get_access_rule_data_from_config()
         rule = self.shares_v2_client.create_access_rule(
             share["id"], access_type, access_to, 'ro')
-        self.shares_v2_client.wait_for_access_rule_status(
-            share["id"], rule["id"], constants.RULE_STATE_ACTIVE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share["id"], constants.RULE_STATE_ACTIVE,
+            resource_name='access_rule', rule_id=rule["id"])
 
         original_replica = self.shares_v2_client.list_share_replicas(
             share["id"])[0]
@@ -295,7 +300,7 @@
 
     @decorators.idempotent_id('7904e3c7-e6d0-472d-b9c9-c0772b4f9f1b')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.48")
+    @utils.skip_if_microversion_not_supported("2.48")
     def test_share_type_azs_share_replicas(self):
         az_spec = ', '.join(self.zones)
         self.admin_shares_v2_client.update_share_type_extra_spec(
@@ -337,22 +342,25 @@
         new_replica = self.create_share_replica(share["id"],
                                                 self.replica_zone,
                                                 cleanup_in_class=False)
-        self.shares_v2_client.wait_for_share_replica_status(
-            new_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, new_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # Promote the new replica to active and verify the replica states
         self.promote_share_replica(new_replica['id'])
         self._verify_active_replica_count(share["id"])
-        self.shares_v2_client.wait_for_share_replica_status(
-            original_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, original_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # Promote the original replica back to active
         self.promote_share_replica(original_replica['id'])
         self._verify_active_replica_count(share["id"])
-        self.shares_v2_client.wait_for_share_replica_status(
-            new_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, new_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
     @decorators.idempotent_id('1452156b-75a5-4f3c-a921-834732a03b0a')
diff --git a/manila_tempest_tests/tests/api/test_replication_export_locations.py b/manila_tempest_tests/tests/api/test_replication_export_locations.py
index c9857e6..1b52a7a 100644
--- a/manila_tempest_tests/tests/api/test_replication_export_locations.py
+++ b/manila_tempest_tests/tests/api/test_replication_export_locations.py
@@ -18,6 +18,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -124,7 +125,7 @@
     @ddt.data(*utils.deduplicate(['2.46', '2.47', LATEST_MICROVERSION]))
     def test_replicated_share_export_locations(self, version):
         """Test behavior changes in the share export locations API at 2.47"""
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         share, replica, primary_replica_exports, replica_exports = (
             self._create_share_and_replica_get_exports()
         )
@@ -145,14 +146,15 @@
         (constants.REPLICATION_STYLE_READABLE, constants.REPLICATION_STYLE_DR),
         'Promotion of secondary not supported in writable replication style.')
     def test_replicated_share_export_locations_with_promotion(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         share, replica, primary_replica_exports, replica_exports = (
             self._create_share_and_replica_get_exports(cleanup_replica=False)
         )
         primary_replica = self.shares_v2_client.get_share_replica(
             primary_replica_exports[0]['share_instance_id'])
-        self.shares_v2_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # Share export locations list API
diff --git a/manila_tempest_tests/tests/api/test_replication_negative.py b/manila_tempest_tests/tests/api/test_replication_negative.py
index c437737..eac1999 100644
--- a/manila_tempest_tests/tests/api/test_replication_negative.py
+++ b/manila_tempest_tests/tests/api/test_replication_negative.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -146,9 +147,10 @@
         # Set replica state to out of sync
         self.admin_client.reset_share_replica_state(
             replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC)
-        self.shares_v2_client.wait_for_share_replica_status(
-            replica['id'], constants.REPLICATION_STATE_OUT_OF_SYNC,
-            status_attr='replica_state')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, replica['id'],
+            constants.REPLICATION_STATE_OUT_OF_SYNC,
+            resource_name='share_replica', status_attr='replica_state')
         # Try promoting the first out_of_sync replica to active state
         self.assertRaises(lib_exc.Forbidden,
                           self.shares_v2_client.promote_share_replica,
@@ -175,8 +177,9 @@
         replica = self.create_share_replica(share["id"], self.replica_zone,
                                             cleanup_in_class=False)
         # By default, 'writable' replica is expected to be in active state
-        self.shares_v2_client.wait_for_share_replica_status(
-            replica["id"], constants.REPLICATION_STATE_ACTIVE,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, replica["id"],
+            constants.REPLICATION_STATE_ACTIVE, resource_name='share_replica',
             status_attr='replica_state')
 
         # Try promoting the replica
@@ -204,7 +207,7 @@
                           CONF.share.run_driver_assisted_migration_tests,
                           "Share migration tests are disabled.")
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     def test_migration_of_replicated_share(self):
         pools = self.admin_client.list_pools(detail=True)['pools']
         hosts = [p['name'] for p in pools]
@@ -223,7 +226,7 @@
 
     @decorators.idempotent_id('bf01bcfc-57cb-4e56-957f-8aa9f1b9be1b')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.48")
+    @utils.skip_if_microversion_lt("2.48")
     def test_try_add_replica_share_type_azs_unsupported_az(self):
         self.admin_shares_v2_client.update_share_type_extra_spec(
             self.share_type['id'], 'availability_zones', 'non-existent az')
@@ -239,7 +242,7 @@
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     @testtools.skipIf(
         not CONF.share.multitenancy_enabled, "Only for multitenancy.")
-    @base.skip_if_microversion_lt("2.51")
+    @utils.skip_if_microversion_lt("2.51")
     def test_try_add_replica_nonexistent_subnet(self):
         # Create a new share network only for a specific az
         data = self.generate_share_network_data()
diff --git a/manila_tempest_tests/tests/api/test_replication_snapshots.py b/manila_tempest_tests/tests/api/test_replication_snapshots.py
index a812679..a150419 100644
--- a/manila_tempest_tests/tests/api/test_replication_snapshots.py
+++ b/manila_tempest_tests/tests/api/test_replication_snapshots.py
@@ -19,6 +19,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -86,8 +87,9 @@
                                                   self.replica_zone,
                                                   cleanup=False)
         self.addCleanup(self.delete_share_replica, original_replica['id'])
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         snapshot = self.create_snapshot_wait_for_active(share["id"])
@@ -122,13 +124,15 @@
                                                   self.replica_zone,
                                                   cleanup=False)
         self.addCleanup(self.delete_share_replica, original_replica['id'])
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         # Wait for snapshot1 to become available
-        self.shares_v2_client.wait_for_snapshot_status(
-            snapshot['id'], "available")
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, snapshot['id'], "available",
+            resource_name='snapshot')
 
         self.promote_share_replica(share_replica['id'])
         self.delete_share_replica(original_replica['id'])
@@ -162,15 +166,17 @@
                                                   self.replica_zone,
                                                   cleanup=False)
         self.addCleanup(self.delete_share_replica, original_replica['id'])
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         snapshot2 = self.create_snapshot_wait_for_active(share["id"])
 
         # Wait for snapshot1 to become available
-        self.shares_v2_client.wait_for_snapshot_status(
-            snapshot1['id'], "available")
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, snapshot1['id'], "available",
+            resource_name='snapshot')
 
         self.promote_share_replica(share_replica['id'])
         # Remove the original active replica to ensure that snapshot is
@@ -205,8 +211,9 @@
                                   share_network_id=self.sn_id)
         share_replica = self.create_share_replica(share["id"],
                                                   self.replica_zone)
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
         snapshot = self.create_snapshot_wait_for_active(share["id"])
         self.shares_v2_client.delete_snapshot(snapshot['id'])
@@ -234,8 +241,9 @@
                                                   self.replica_zone,
                                                   cleanup=False)
         self.addCleanup(self.delete_share_replica, original_replica['id'])
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
         self.promote_share_replica(share_replica['id'])
         # Delete the demoted replica so promoted replica can be cleaned
diff --git a/manila_tempest_tests/tests/api/test_revert_to_snapshot.py b/manila_tempest_tests/tests/api/test_revert_to_snapshot.py
index 6a915c6..4e7d0ca 100644
--- a/manila_tempest_tests/tests/api/test_revert_to_snapshot.py
+++ b/manila_tempest_tests/tests/api/test_revert_to_snapshot.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
@@ -115,8 +116,9 @@
             self.share['id'],
             snapshot['id'],
             version=version)
-        self.shares_v2_client.wait_for_share_status(self.share['id'],
-                                                    constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share['id'],
+            constants.STATUS_AVAILABLE)
 
     @decorators.idempotent_id('09bd9942-7ef9-4d24-b2dd-f83bdda27b50')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
@@ -137,8 +139,9 @@
         self.shares_v2_client.revert_to_snapshot(self.share['id'],
                                                  snapshot1['id'],
                                                  version=version)
-        self.shares_v2_client.wait_for_share_status(self.share['id'],
-                                                    constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share['id'],
+            constants.STATUS_AVAILABLE)
 
     @decorators.idempotent_id('146de138-d351-49dc-a13a-5cdbed40b9ac')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
@@ -158,8 +161,9 @@
 
         share_replica = self.create_share_replica(share["id"],
                                                   self.replica_zone)
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
 
         snapshot = self.create_snapshot_wait_for_active(share["id"])
@@ -168,8 +172,9 @@
             share['id'],
             snapshot['id'],
             version=version)
-        self.shares_v2_client.wait_for_share_status(share['id'],
-                                                    constants.STATUS_AVAILABLE)
-        self.shares_v2_client.wait_for_share_replica_status(
-            share_replica['id'], constants.REPLICATION_STATE_IN_SYNC,
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share_replica['id'],
+            constants.REPLICATION_STATE_IN_SYNC, resource_name='share_replica',
             status_attr='replica_state')
diff --git a/manila_tempest_tests/tests/api/test_rules.py b/manila_tempest_tests/tests/api/test_rules.py
index 2a2420a..18e8472 100644
--- a/manila_tempest_tests/tests/api/test_rules.py
+++ b/manila_tempest_tests/tests/api/test_rules.py
@@ -22,6 +22,7 @@
 import testtools
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -55,12 +56,13 @@
         self.assertEqual("queued_to_apply", rule['state'])
 
     if utils.is_microversion_le(version, '2.9'):
-        self.shares_client.wait_for_access_rule_status(
-            self.share["id"], rule["id"], "active")
+        waiters.wait_for_resource_status(
+            self.shares_client, self.share["id"], "active",
+            resource_name='access_rule', rule_id=rule["id"])
     else:
-        self.shares_v2_client.wait_for_share_status(
-            self.share["id"], "active", status_attr='access_rules_status',
-            version=version)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share["id"], "active",
+            status_attr='access_rules_status', version=version)
         # If the 'access_rules_status' transitions to 'active',
         # rule state must too
         rules = self.shares_v2_client.list_access_rules(self.share['id'])
@@ -137,15 +139,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # delete rule and wait for deletion
         if utils.is_microversion_eq(version, '1.0'):
@@ -190,15 +194,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # delete rule and wait for deletion
         if utils.is_microversion_eq(version, '1.0'):
@@ -291,15 +297,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # delete rule and wait for deletion
         if utils.is_microversion_eq(version, '1.0'):
@@ -393,15 +401,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # delete rule
         if utils.is_microversion_eq(version, '1.0'):
@@ -441,15 +451,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         if utils.is_microversion_eq(version, '1.0'):
             self.shares_client.delete_access_rule(self.share["id"], rule["id"])
@@ -494,7 +506,7 @@
     @decorators.idempotent_id('4e636fd2-26ef-4b63-96eb-77860a8b6cdf')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     @ddt.data(*itertools.product(
-        set(['2.13', '2.27', '2.28', LATEST_MICROVERSION]),
+        utils.deduplicate(['2.13', '2.27', '2.28', LATEST_MICROVERSION]),
         ("alice", "alice_bob", "alice bob"),
         ('rw', 'ro')))
     @ddt.unpack
@@ -506,8 +518,9 @@
         self.assertEqual(access_level, rule['access_level'])
         for key in ('deleted', 'deleted_at', 'instance_mappings'):
             self.assertNotIn(key, rule.keys())
-        self.shares_v2_client.wait_for_access_rule_status(
-            self.share["id"], rule["id"], "active")
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share["id"], "active",
+            resource_name='access_rule', rule_id=rule["id"])
 
         self.shares_v2_client.delete_access_rule(
             self.share["id"], rule["id"], version=version)
@@ -520,8 +533,9 @@
         # Grant access to the share
         access1 = self.shares_v2_client.create_access_rule(
             self.share['id'], self.access_type, self.access_to, 'rw')
-        self.shares_v2_client.wait_for_access_rule_status(
-            self.share['id'], access1['id'], 'active')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share["id"], "active",
+            resource_name='access_rule', rule_id=access1["id"])
 
         # Create a new user in the current project
         project = self.os_admin.projects_client.show_project(
@@ -537,8 +551,9 @@
         # used in access1
         access2 = user_client.shares_v2_client.create_access_rule(
             share2['id'], self.access_type, self.access_to, 'rw')
-        user_client.shares_v2_client.wait_for_access_rule_status(
-            share2['id'], access2['id'], 'active')
+        waiters.wait_for_resource_status(
+            user_client.shares_v2_client, share2['id'], "active",
+            resource_name='access_rule', rule_id=access2['id'])
 
 
 @ddt.ddt
@@ -580,7 +595,7 @@
     @ddt.data(*utils.deduplicate(
         ['1.0', '2.9', '2.27', '2.28', '2.45', LATEST_MICROVERSION]))
     def test_list_access_rules(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         if (utils.is_microversion_lt(version, '2.13') and
                 CONF.share.enable_cephx_rules_for_protocols):
             msg = ("API version %s does not support cephx access type, need "
@@ -614,15 +629,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name="access_rule", rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name="access_rule", rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # list rules
         if utils.is_microversion_eq(version, '1.0'):
@@ -699,15 +716,17 @@
             self.assertEqual("queued_to_apply", rule['state'])
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, share["id"], "active",
+                resource_name="access_rule", rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share["id"], "active",
+                resource_name="access_rule", rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # delete share
         if utils.is_microversion_eq(version, '1.0'):
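
Note on the new waiter calls above: the shared helper takes the client as its
first positional argument, identifies rule-level waits through resource_name
and rule_id, and can poll a different field (such as access_rules_status or
state) via status_attr. A minimal sketch of the call shapes this series
standardizes on, where share and rule stand for whatever objects the
individual test created:

    from manila_tempest_tests.common import waiters

    # Wait on a single access rule (the 1.0 / 2.9 branches above).
    waiters.wait_for_resource_status(
        self.shares_v2_client, share['id'], 'active',
        resource_name='access_rule', rule_id=rule['id'])

    # Wait on the share's aggregated access_rules_status (newer microversions).
    waiters.wait_for_resource_status(
        self.shares_v2_client, share['id'], 'active',
        status_attr='access_rules_status', version=version)

    # Plain share status wait.
    waiters.wait_for_resource_status(
        self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
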
diff --git a/manila_tempest_tests/tests/api/test_rules_negative.py b/manila_tempest_tests/tests/api/test_rules_negative.py
index 63ffb91..fd736a3 100644
--- a/manila_tempest_tests/tests/api/test_rules_negative.py
+++ b/manila_tempest_tests/tests/api/test_rules_negative.py
@@ -21,6 +21,8 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
+from manila_tempest_tests import share_exceptions
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -103,15 +105,17 @@
                 self.share["id"], access_type, access_to, version=version)
 
         if utils.is_microversion_eq(version, '1.0'):
-            self.shares_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_client, self.share["id"], "active",
+                resource_name='access_rule', rule_id=rule["id"])
         elif utils.is_microversion_eq(version, '2.9'):
-            self.shares_v2_client.wait_for_access_rule_status(
-                self.share["id"], rule["id"], "active")
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                resource_name="access_rule", rule_id=rule["id"])
         else:
-            self.shares_v2_client.wait_for_share_status(
-                self.share["id"], "active", status_attr='access_rules_status',
-                version=version)
+            waiters.wait_for_resource_status(
+                self.shares_v2_client, self.share["id"], "active",
+                status_attr='access_rules_status', version=version)
 
         # try create duplicate of rule
         if utils.is_microversion_eq(version, '1.0'):
@@ -153,8 +157,9 @@
             self.share["id"], "ip", access_to)
         self.addCleanup(self.shares_v2_client.delete_access_rule,
                         self.share["id"], rule['id'])
-        self.shares_v2_client.wait_for_share_status(
-            self.share["id"], "active", status_attr='access_rules_status')
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.share["id"], "active",
+            status_attr='access_rules_status')
 
         self.assertRaises(lib_exc.BadRequest,
                           self.shares_v2_client.create_access_rule,
@@ -184,8 +189,8 @@
         share = self.create_share(share_type_id=share_type['id'],
                                   cleanup_in_class=False,
                                   wait_for_status=False)
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], constants.STATUS_ERROR)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.STATUS_ERROR)
         self.assertRaises(lib_exc.BadRequest,
                           self.admin_client.create_access_rule,
                           share["id"], access_type, access_to)
@@ -398,6 +403,17 @@
                           self.shares_v2_client.create_access_rule,
                           self.share["id"], self.access_type, access_to)
 
+    @decorators.idempotent_id('16b7d848-2f7c-4709-85a3-2dfb4576cc59')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_create_access_rule_cephx_admin_user(self):
+        """CVE-2020-27781 - using admin in cephx rule must be disallowed"""
+
+        self.assertRaises(share_exceptions.AccessRuleBuildErrorException,
+                          self.allow_access,
+                          self.share["id"],
+                          access_type=self.access_type,
+                          access_to='admin')
+
     @decorators.idempotent_id('dd8be44c-c7e8-42fe-b81c-095a1c66730c')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     def test_create_access_rule_cephx_with_wrong_level(self):
@@ -429,7 +445,7 @@
         self.assertEqual('error', share_alt_updated['access_rules_status'])
 
     @decorators.idempotent_id('1a9f46f0-d4e1-40ac-8726-aedd0320d583')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     def test_can_apply_new_cephx_rules_when_one_is_in_error_state(self):
         # Create share on "primary" tenant
         share_primary = self.create_share()
@@ -458,8 +474,9 @@
         # Check share's access_rules_status has transitioned to "active" status
         self.alt_shares_v2_client.delete_access_rule(
             share_alt['id'], rule1['id'])
-        self.alt_shares_v2_client.wait_for_share_status(
-            share_alt['id'], 'active', status_attr='access_rules_status')
+        waiters.wait_for_resource_status(
+            self.alt_shares_v2_client, share_alt['id'], 'active',
+            status_attr='access_rules_status')
 
 
 @ddt.ddt
@@ -467,8 +484,8 @@
     # Tests independent from rule type and share protocol
 
     @classmethod
-    def resource_setup(cls):
-        super(ShareRulesNegativeTest, cls).resource_setup()
+    def skip_checks(cls):
+        super(ShareRulesNegativeTest, cls).skip_checks()
         if not (any(p in CONF.share.enable_ip_rules_for_protocols
                     for p in cls.protocols) or
                 any(p in CONF.share.enable_user_rules_for_protocols
@@ -479,6 +496,10 @@
                     for p in cls.protocols)):
             cls.message = "Rule tests are disabled"
             raise cls.skipException(cls.message)
+
+    @classmethod
+    def resource_setup(cls):
+        super(ShareRulesNegativeTest, cls).resource_setup()
         # create share type
         cls.share_type = cls._create_share_type()
         cls.share_type_id = cls.share_type['id']
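
The resource_setup/skip_checks split applied to this class (and to
SharesNFSTest further down) leans on tempest's class setup order, where
skip_checks runs before any credentials or resources are provisioned, so a
configuration-based skip no longer creates a share type it will never use. A
minimal sketch of the resulting shape, with a shortened condition standing in
for the full protocol checks above:

    @classmethod
    def skip_checks(cls):
        super(ShareRulesNegativeTest, cls).skip_checks()
        # Evaluated first: nothing has been provisioned yet.
        if not any(p in CONF.share.enable_ip_rules_for_protocols
                   for p in cls.protocols):
            raise cls.skipException("Rule tests are disabled")

    @classmethod
    def resource_setup(cls):
        super(ShareRulesNegativeTest, cls).resource_setup()
        # Reached only when skip_checks passed.
        cls.share_type = cls._create_share_type()
        cls.share_type_id = cls.share_type['id']
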
diff --git a/manila_tempest_tests/tests/api/test_security_services.py b/manila_tempest_tests/tests/api/test_security_services.py
index 81645b5..5755473 100644
--- a/manila_tempest_tests/tests/api/test_security_services.py
+++ b/manila_tempest_tests/tests/api/test_security_services.py
@@ -48,7 +48,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(*utils.deduplicate(['1.0', '2.42', '2.44', LATEST_MICROVERSION]))
     def test_list_security_services_with_detail(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         with_ou = True if utils.is_microversion_ge(version, '2.44') else False
         if utils.is_microversion_ge(version, '2.0'):
             listed = self.shares_v2_client.list_security_services(
@@ -171,7 +171,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
     @ddt.data(*utils.deduplicate(['1.0', '2.43', '2.44', LATEST_MICROVERSION]))
     def test_get_security_service(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         with_ou = True if utils.is_microversion_ge(version, '2.44') else False
         data = self.generate_security_service_data(set_ou=with_ou)
 
@@ -225,6 +225,7 @@
         sn = self.shares_client.get_share_network(
             self.shares_client.share_network_id)
         fresh_sn = self.create_share_network(
+            add_security_services=False,
             neutron_net_id=sn["neutron_net_id"],
             neutron_subnet_id=sn["neutron_subnet_id"])
 
diff --git a/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py b/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py
index f504e5c..32323fc 100644
--- a/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py
+++ b/manila_tempest_tests/tests/api/test_security_services_mapping_negative.py
@@ -33,7 +33,8 @@
     @classmethod
     def resource_setup(cls):
         super(SecServicesMappingNegativeTest, cls).resource_setup()
-        cls.sn = cls.create_share_network(cleanup_in_class=True)
+        cls.sn = cls.create_share_network(cleanup_in_class=True,
+                                          add_security_services=False)
         cls.share_net_info = (
             utils.share_network_get_default_subnet(cls.sn)
             if utils.share_network_subnets_are_supported() else cls.sn)
@@ -115,6 +116,7 @@
         sn = self.shares_client.get_share_network(
             self.shares_client.share_network_id)
         fresh_sn = self.create_share_network(
+            add_security_services=False,
             neutron_net_id=sn["neutron_net_id"],
             neutron_subnet_id=sn["neutron_subnet_id"])
 
@@ -147,7 +149,8 @@
         # create share network
         data = self.generate_share_network_data()
 
-        sn = self.create_share_network(client=self.cl, **data)
+        sn = self.create_share_network(client=self.cl,
+                                       add_security_services=False, **data)
         self.assertDictContainsSubset(data, sn)
 
         # create security services with same type
@@ -173,7 +176,8 @@
         # create share network
         data = self.generate_share_network_data()
 
-        sn = self.create_share_network(client=self.cl, **data)
+        sn = self.create_share_network(client=self.cl,
+                                       add_security_services=False, **data)
         self.assertDictContainsSubset(data, sn)
 
         # create security service
diff --git a/manila_tempest_tests/tests/api/test_security_services_negative.py b/manila_tempest_tests/tests/api/test_security_services_negative.py
index 3539634..917860d 100644
--- a/manila_tempest_tests/tests/api/test_security_services_negative.py
+++ b/manila_tempest_tests/tests/api/test_security_services_negative.py
@@ -100,6 +100,7 @@
         sn = self.shares_client.get_share_network(
             self.shares_client.share_network_id)
         fresh_sn = self.create_share_network(
+            add_security_services=False,
             neutron_net_id=sn["neutron_net_id"],
             neutron_subnet_id=sn["neutron_subnet_id"])
 
diff --git a/manila_tempest_tests/tests/api/test_share_group_actions.py b/manila_tempest_tests/tests/api/test_share_group_actions.py
index b52d7f6..6b5ab8d 100644
--- a/manila_tempest_tests/tests/api/test_share_group_actions.py
+++ b/manila_tempest_tests/tests/api/test_share_group_actions.py
@@ -111,7 +111,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_get_share_group(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         # Get share group
         share_group = self.shares_v2_client.get_share_group(
@@ -166,7 +166,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_list_share_groups(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         # List share groups
         share_groups = self.shares_v2_client.list_share_groups(
@@ -198,7 +198,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_list_share_groups_with_detail_min(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
         params = None
         if utils.is_microversion_ge(version, '2.36'):
             params = {'name~': 'tempest', 'description~': 'tempest'}
@@ -259,7 +259,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_get_share_group_snapshot(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         # Get share group snapshot
         sg_snapshot = self.shares_v2_client.get_share_group_snapshot(
@@ -317,7 +317,7 @@
                             LATEST_MICROVERSION]))
     def test_create_share_group_from_populated_share_group_snapshot(self,
                                                                     version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         sg_snapshot = self.shares_v2_client.get_share_group_snapshot(
             self.sg_snapshot['id'],
@@ -422,7 +422,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_update_share_group(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         # Get share_group
         share_group = self.shares_v2_client.get_share_group(
@@ -463,7 +463,7 @@
                             constants.SHARE_GROUPS_GRADUATION_VERSION,
                             LATEST_MICROVERSION]))
     def test_create_update_read_share_group_with_unicode(self, version):
-        self.skip_if_microversion_not_supported(version)
+        utils.skip_if_microversion_not_supported(version)
 
         value1 = u'ಠ_ಠ'
         value2 = u'ಠ_ರೃ'
diff --git a/manila_tempest_tests/tests/api/test_share_groups.py b/manila_tempest_tests/tests/api/test_share_groups.py
index 4c655e9..8dc7c06 100644
--- a/manila_tempest_tests/tests/api/test_share_groups.py
+++ b/manila_tempest_tests/tests/api/test_share_groups.py
@@ -196,7 +196,7 @@
             new_share_group['share_network_id'],
             msg)
 
-    @base.skip_if_microversion_lt("2.34")
+    @utils.skip_if_microversion_lt("2.34")
     @decorators.idempotent_id('14fd6d88-87ff-4af2-ad17-f95dbd8dcd61')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     @ddt.data(
diff --git a/manila_tempest_tests/tests/api/test_share_groups_negative.py b/manila_tempest_tests/tests/api/test_share_groups_negative.py
index bfe07c6..8f67fbe 100644
--- a/manila_tempest_tests/tests/api/test_share_groups_negative.py
+++ b/manila_tempest_tests/tests/api/test_share_groups_negative.py
@@ -290,7 +290,7 @@
             share_type_ids=[self.share_type_id],
             version=constants.MIN_SHARE_GROUP_MICROVERSION)
 
-    @base.skip_if_microversion_lt("2.34")
+    @utils.skip_if_microversion_lt("2.34")
     @decorators.idempotent_id('64527564-9cd6-42db-8897-910f4fc1a151')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     def test_create_sg_and_share_with_different_azs(self):
diff --git a/manila_tempest_tests/tests/api/test_share_network_subnets_negative.py b/manila_tempest_tests/tests/api/test_share_network_subnets_negative.py
index a374f4e..82d27ca 100644
--- a/manila_tempest_tests/tests/api/test_share_network_subnets_negative.py
+++ b/manila_tempest_tests/tests/api/test_share_network_subnets_negative.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -171,8 +172,8 @@
 
         # Create a share into the share network
         share = self.shares_v2_client.create_share(**args)
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
         share = self.shares_v2_client.get_share(share['id'])
 
         # Gets the export locations to be used in the future
@@ -202,8 +203,9 @@
         )
 
         # Do some necessary cleanup
-        self.shares_v2_client.wait_for_share_status(
-            managed_share['id'], constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, managed_share['id'],
+            constants.STATUS_AVAILABLE)
         self.shares_client.delete_share(managed_share['id'])
         self.shares_v2_client.wait_for_resource_deletion(
             share_id=managed_share["id"])
@@ -254,8 +256,8 @@
 
         # Create a share into the share network
         share = self.shares_v2_client.create_share(**args)
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'], constants.STATUS_AVAILABLE)
         share = self.admin_shares_v2_client.get_share(share['id'])
         share_server = self.admin_shares_v2_client.show_share_server(
             share['share_server_id']
diff --git a/manila_tempest_tests/tests/api/test_share_networks.py b/manila_tempest_tests/tests/api/test_share_networks.py
index db07591..fa486b4 100644
--- a/manila_tempest_tests/tests/api/test_share_networks.py
+++ b/manila_tempest_tests/tests/api/test_share_networks.py
@@ -111,7 +111,7 @@
 
     @decorators.idempotent_id('bff1356e-70aa-4bbe-b398-cb4dadd8fcb1')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API)
-    @base.skip_if_microversion_lt("2.36")
+    @utils.skip_if_microversion_lt("2.36")
     def test_list_share_networks_like_filter(self):
         valid_filter_opts = {
             'name': 'sn_with_ldap_ss',
@@ -181,6 +181,7 @@
         }
         cls.sn_with_ldap_ss = cls.create_share_network(
             cleanup_in_class=True,
+            add_security_services=False,
             **cls.data_sn_with_ldap_ss)
 
         cls.shares_client.add_sec_service_to_share_network(
@@ -206,6 +207,7 @@
 
         cls.sn_with_kerberos_ss = cls.create_share_network(
             cleanup_in_class=True,
+            add_security_services=False,
             **cls.data_sn_with_kerberos_ss)
 
         cls.shares_client.add_sec_service_to_share_network(
@@ -298,7 +300,7 @@
     @testtools.skipUnless(CONF.share.multitenancy_enabled,
                           "Only for multitenancy.")
     @testtools.skipUnless(CONF.service_available.neutron, "Only with neutron.")
-    @base.skip_if_microversion_lt("2.18")
+    @utils.skip_if_microversion_lt("2.18")
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     def test_gateway_with_neutron(self):
         subnet_client = self.subnets_client
@@ -322,7 +324,7 @@
     @testtools.skipUnless(CONF.share.multitenancy_enabled,
                           "Only for multitenancy.")
     @testtools.skipUnless(CONF.service_available.neutron, "Only with neutron.")
-    @base.skip_if_microversion_lt("2.20")
+    @utils.skip_if_microversion_lt("2.20")
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     def test_mtu_with_neutron(self):
         network_client = self.networks_client
diff --git a/manila_tempest_tests/tests/api/test_share_networks_negative.py b/manila_tempest_tests/tests/api/test_share_networks_negative.py
index 0d9cbec..b9e57de 100644
--- a/manila_tempest_tests/tests/api/test_share_networks_negative.py
+++ b/manila_tempest_tests/tests/api/test_share_networks_negative.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -143,7 +144,7 @@
 
     @decorators.idempotent_id('4e71de31-1064-40da-948d-a72063fbd647')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.35")
+    @utils.skip_if_microversion_not_supported("2.35")
     def test_list_shares_with_like_filter_not_exist(self):
         filters = {
             'name~': 'fake_not_exist',
@@ -155,7 +156,7 @@
 
         self.assertEqual(0, len(share_networks))
 
-    @base.skip_if_microversion_lt("2.51")
+    @utils.skip_if_microversion_lt("2.51")
     @decorators.idempotent_id('8a995305-ede9-4002-a9cd-f24ff4d71f63')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     def test_delete_share_network_contains_more_than_one_subnet(self):
@@ -184,7 +185,7 @@
         default_subnet = share_network['share_network_subnets'][0]
         self.assertIsNone(default_subnet['availability_zone'])
 
-    @base.skip_if_microversion_lt("2.51")
+    @utils.skip_if_microversion_lt("2.51")
     @decorators.idempotent_id('d84c3c5c-5913-42d4-9a66-0d5a78295adb')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     def test_create_share_network_inexistent_az(self):
diff --git a/manila_tempest_tests/tests/api/test_share_types_negative.py b/manila_tempest_tests/tests/api/test_share_types_negative.py
index 95cc41f..a7d8826 100644
--- a/manila_tempest_tests/tests/api/test_share_types_negative.py
+++ b/manila_tempest_tests/tests/api/test_share_types_negative.py
@@ -23,6 +23,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 
 CONF = config.CONF
@@ -79,7 +80,7 @@
                           self.st['id'],
                           self.shares_client.tenant_id)
 
-    @base.skip_if_microversion_lt("2.50")
+    @utils.skip_if_microversion_lt("2.50")
     @decorators.idempotent_id('4a22945c-8988-43a1-88c9-eb86e6abcd8e')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
     @ddt.data(
@@ -100,7 +101,7 @@
                           st_id, st_name, st_is_public, st_description,
                           version)
 
-    @base.skip_if_microversion_lt("2.50")
+    @utils.skip_if_microversion_lt("2.50")
     @decorators.idempotent_id('7193465a-ed8e-44d5-9ca9-4e8a3c5958e0')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
     @ddt.data('2.50', LATEST_MICROVERSION)
diff --git a/manila_tempest_tests/tests/api/test_shares.py b/manila_tempest_tests/tests/api/test_shares.py
index 48461b1..8802f4c 100644
--- a/manila_tempest_tests/tests/api/test_shares.py
+++ b/manila_tempest_tests/tests/api/test_shares.py
@@ -30,11 +30,15 @@
     protocol = "nfs"
 
     @classmethod
-    def resource_setup(cls):
-        super(SharesNFSTest, cls).resource_setup()
+    def skip_checks(cls):
+        super(SharesNFSTest, cls).skip_checks()
         if cls.protocol not in CONF.share.enable_protocols:
             message = "%s tests are disabled" % cls.protocol
             raise cls.skipException(message)
+
+    @classmethod
+    def resource_setup(cls):
+        super(SharesNFSTest, cls).resource_setup()
         # create share_type
         cls.share_type = cls._create_share_type()
         cls.share_type_id = cls.share_type['id']
@@ -112,16 +116,6 @@
             detailed_elements.add('progress')
             self.assertTrue(detailed_elements.issubset(share.keys()), msg)
 
-        # This check will ensure that when a share creation request is handled,
-        # if the driver has the "driver handles share servers" option enabled,
-        # that a share server will be created, otherwise, not.
-        share_get = self.admin_shares_v2_client.get_share(share['id'])
-        share_server = share_get['share_server_id']
-        if CONF.share.multitenancy_enabled:
-            self.assertNotEmpty(share_server)
-        else:
-            self.assertEmpty(share_server)
-
         # Delete share
         self.shares_v2_client.delete_share(share['id'])
         self.shares_v2_client.wait_for_resource_deletion(share_id=share['id'])
diff --git a/manila_tempest_tests/tests/api/test_shares_actions.py b/manila_tempest_tests/tests/api/test_shares_actions.py
index 7f48ee1..12af9f5 100644
--- a/manila_tempest_tests/tests/api/test_shares_actions.py
+++ b/manila_tempest_tests/tests/api/test_shares_actions.py
@@ -22,6 +22,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -368,7 +369,7 @@
 
     @decorators.idempotent_id('f446e8cb-5bef-45ac-8b87-f4136f44ca69')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.36")
+    @utils.skip_if_microversion_lt("2.36")
     def test_list_shares_with_detail_filter_by_existed_description(self):
         # list shares by description, at least one share is expected
         params = {"description": self.share_desc}
@@ -377,7 +378,7 @@
 
     @decorators.idempotent_id('1276b97b-cf46-4953-973f-f995985a1ce4')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.36")
+    @utils.skip_if_microversion_lt("2.36")
     def test_list_shares_with_detail_filter_by_inexact_name(self):
         # list shares by name, at least one share is expected
         params = {"name~": 'tempest-share'}
@@ -427,47 +428,13 @@
 
     @decorators.idempotent_id('0019afa2-fae2-417f-a7e0-2af665a966b0')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.42")
+    @utils.skip_if_microversion_lt("2.42")
     def test_list_shares_with_detail_with_count(self):
         # list shares by name, at least one share is expected
         params = {"with_count": 'true'}
         shares = self.shares_v2_client.list_shares_with_detail(params)
         self.assertGreater(shares["count"], 0)
 
-    @decorators.idempotent_id('557a0474-9e30-47b4-a766-19e2afb13e66')
-    @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
-    def test_list_shares_public_with_detail(self):
-        public_share = self.create_share(
-            name='public_share',
-            description='public_share_desc',
-            share_type_id=self.share_type_id,
-            is_public=True,
-            cleanup_in_class=False
-        )
-        private_share = self.create_share(
-            name='private_share',
-            description='private_share_desc',
-            share_type_id=self.share_type_id,
-            is_public=False,
-            cleanup_in_class=False
-        )
-
-        params = {"is_public": True}
-        shares = self.alt_shares_client.list_shares_with_detail(params)
-
-        keys = [
-            "status", "description", "links", "availability_zone",
-            "created_at", "export_location", "share_proto",
-            "name", "snapshot_id", "id", "size", "project_id", "is_public",
-        ]
-        [self.assertIn(key, sh.keys()) for sh in shares for key in keys]
-
-        gen = [sid["id"] for sid in shares if sid["id"] == public_share["id"]]
-        msg = "expected id lists %s times in share list" % (len(gen))
-        self.assertEqual(1, len(gen), msg)
-
-        self.assertFalse(any([s["id"] == private_share["id"] for s in shares]))
-
     @decorators.idempotent_id('174829eb-fd3e-46ef-880b-f05c3d44d1fe')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
@@ -479,7 +446,7 @@
         if version is None:
             snapshot = self.shares_client.get_snapshot(self.snap["id"])
         else:
-            self.skip_if_microversion_not_supported(version)
+            utils.skip_if_microversion_not_supported(version)
             snapshot = self.shares_v2_client.get_snapshot(
                 self.snap["id"], version=version)
 
@@ -556,7 +523,7 @@
         if version is None:
             snaps = self.shares_client.list_snapshots_with_detail()
         else:
-            self.skip_if_microversion_not_supported(version)
+            utils.skip_if_microversion_not_supported(version)
             snaps = self.shares_v2_client.list_snapshots_with_detail(
                 version=version, params=params)
 
@@ -623,7 +590,7 @@
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
                           "Snapshot tests are disabled.")
-    @base.skip_if_microversion_not_supported("2.35")
+    @utils.skip_if_microversion_not_supported("2.35")
     def test_list_snapshots_with_detail_filter_by_description(self):
         filters = {'description': self.snap_desc}
 
@@ -664,7 +631,8 @@
 
         # extend share and wait for active status
         self.shares_v2_client.extend_share(share['id'], new_size)
-        self.shares_client.wait_for_share_status(share['id'], 'available')
+        waiters.wait_for_resource_status(
+            self.shares_client, share['id'], 'available')
 
         # check state and new size
         share_get = self.shares_v2_client.get_share(share['id'])
@@ -691,7 +659,8 @@
 
         # shrink share and wait for active status
         self.shares_v2_client.shrink_share(share['id'], new_size)
-        self.shares_client.wait_for_share_status(share['id'], 'available')
+        waiters.wait_for_resource_status(
+            self.shares_client, share['id'], 'available')
 
         # check state and new size
         share_get = self.shares_v2_client.get_share(share['id'])
@@ -745,16 +714,15 @@
         new_name = data_utils.rand_name("tempest-new-name")
         new_desc = data_utils.rand_name("tempest-new-description")
         updated = self.shares_client.update_share(
-            share["id"], new_name, new_desc, is_public=True)
+            share["id"], name=new_name, desc=new_desc)
         self.assertEqual(new_name, updated["name"])
         self.assertEqual(new_desc, updated["description"])
-        self.assertTrue(updated["is_public"])
 
         # get share
         share = self.shares_client.get_share(self.share['id'])
         self.assertEqual(new_name, share["name"])
         self.assertEqual(new_desc, share["description"])
-        self.assertTrue(share["is_public"])
+        self.assertFalse(share["is_public"])
 
     @decorators.idempotent_id('20f299f6-2441-4629-b44e-d791d57f413c')
     @tc.attr(base.TAG_POSITIVE, base.TAG_API_WITH_BACKEND)
diff --git a/manila_tempest_tests/tests/api/test_shares_actions_negative.py b/manila_tempest_tests/tests/api/test_shares_actions_negative.py
index 5195d14..f7acfda 100644
--- a/manila_tempest_tests/tests/api/test_shares_actions_negative.py
+++ b/manila_tempest_tests/tests/api/test_shares_actions_negative.py
@@ -22,6 +22,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.tests.api import base
+from manila_tempest_tests import utils
 
 CONF = config.CONF
 
@@ -168,7 +169,7 @@
 
     @decorators.idempotent_id('ff307c91-3bb9-48b5-926c-5a2747320151')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.34")
+    @utils.skip_if_microversion_not_supported("2.34")
     @ddt.data('path', 'id')
     def test_list_shares_with_export_location_and_invalid_version(
             self, export_location_type):
@@ -185,7 +186,7 @@
 
     @decorators.idempotent_id('ffc3dc76-2f92-4308-a125-1d3905ed72ba')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_lt("2.35")
+    @utils.skip_if_microversion_lt("2.35")
     @ddt.data('path', 'id')
     def test_list_shares_with_export_location_not_exist(
             self, export_location_type):
@@ -199,7 +200,7 @@
 
     @decorators.idempotent_id('3dbcf17b-cc63-43ea-b45f-eae12300729e')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.36")
+    @utils.skip_if_microversion_not_supported("2.36")
     def test_list_shares_with_like_filter_and_invalid_version(self):
         # In API versions < v2.36, querying the share API by inexact
         # filter (name or description) should have no effect. Those
@@ -215,7 +216,7 @@
 
     @decorators.idempotent_id('f41c6cd2-62cf-4bba-a26e-21a6e86eae15')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.36")
+    @utils.skip_if_microversion_not_supported("2.36")
     def test_list_shares_with_like_filter_not_exist(self):
         filters = {
             'name~': 'fake_not_exist',
@@ -237,7 +238,7 @@
 
     @decorators.idempotent_id('5b0ceae1-357f-4b51-81a6-88973ea20c16')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.36")
+    @utils.skip_if_microversion_not_supported("2.36")
     def test_list_shares_with_description_not_exist(self):
         filters = {
             'description': "tempest-share",
@@ -248,7 +249,7 @@
 
     @decorators.idempotent_id('061ee37a-96b2-4b4f-9cfe-2c8c80ed4370')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    @base.skip_if_microversion_not_supported("2.36")
+    @utils.skip_if_microversion_not_supported("2.36")
     def test_list_snapshots_with_description_not_exist(self):
         filters = {
             'description': "tempest-snapshot",
@@ -268,3 +269,11 @@
             params=filters)
 
         self.assertEqual(0, len(shares))
+
+    @decorators.skip_because(bug='1914363')
+    @decorators.idempotent_id('e8f857f1-ec32-4f81-9e09-26065891dc93')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
+    def test_get_share_from_other_project(self):
+        self.assertRaises(lib_exc.NotFound,
+                          self.alt_shares_v2_client.get_share,
+                          self.share['id'])
diff --git a/manila_tempest_tests/tests/api/test_shares_from_snapshot_across_pools.py b/manila_tempest_tests/tests/api/test_shares_from_snapshot_across_pools.py
index f8c5255..c86c56d 100644
--- a/manila_tempest_tests/tests/api/test_shares_from_snapshot_across_pools.py
+++ b/manila_tempest_tests/tests/api/test_shares_from_snapshot_across_pools.py
@@ -147,7 +147,6 @@
             raise self.skipException(msg)
         azs = list(azs)
         share_a = self.create_share(share_type_id=self.share_type_id,
-                                    is_public=True,
                                     availability_zone=azs[0])
 
         # Create snapshot
diff --git a/manila_tempest_tests/tests/api/test_shares_negative.py b/manila_tempest_tests/tests/api/test_shares_negative.py
index 9ee4202..ac4b3f1 100644
--- a/manila_tempest_tests/tests/api/test_shares_negative.py
+++ b/manila_tempest_tests/tests/api/test_shares_negative.py
@@ -34,22 +34,6 @@
         cls.share_type = cls._create_share_type()
         cls.share_type_id = cls.share_type['id']
 
-        # create share
-        cls.share = cls.create_share(
-            name='public_share',
-            description='public_share_desc',
-            share_type_id=cls.share_type_id,
-            is_public=True,
-            metadata={'key': 'value'}
-        )
-
-    @decorators.idempotent_id('255011c0-4ed9-4174-bb13-8bbd06a62529')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_update_share_with_wrong_public_value(self):
-        self.assertRaises(lib_exc.BadRequest,
-                          self.shares_client.update_share, self.share["id"],
-                          is_public="truebar")
-
     @decorators.idempotent_id('b9bb8dee-0c7c-4e51-909c-028335b1a6a0')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
@@ -154,45 +138,6 @@
             snapshot_id=snap["id"],
         )
 
-    @decorators.idempotent_id('3443493b-f56a-4faa-9968-e7cbb0d2802f')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_update_other_tenants_public_share(self):
-        self.assertRaises(lib_exc.Forbidden,
-                          self.alt_shares_v2_client.update_share,
-                          self.share["id"],
-                          name="new_name")
-
-    @decorators.idempotent_id('68d1f1bc-16e4-4086-8982-7e44ca6bdc4d')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_delete_other_tenants_public_share(self):
-        self.assertRaises(lib_exc.Forbidden,
-                          self.alt_shares_v2_client.delete_share,
-                          self.share['id'])
-
-    @decorators.idempotent_id('1f9e5d84-0885-4a4b-9196-9031a1c01508')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_set_metadata_of_other_tenants_public_share(self):
-        self.assertRaises(lib_exc.Forbidden,
-                          self.alt_shares_v2_client.set_metadata,
-                          self.share['id'],
-                          {'key': 'value'})
-
-    @decorators.idempotent_id('fed7a935-9699-43a1-854e-67b61ba6233e')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_update_metadata_of_other_tenants_public_share(self):
-        self.assertRaises(lib_exc.Forbidden,
-                          self.alt_shares_v2_client.update_all_metadata,
-                          self.share['id'],
-                          {'key': 'value'})
-
-    @decorators.idempotent_id('bd62adeb-73c2-4b04-8812-80b479cd5c3b')
-    @tc.attr(base.TAG_NEGATIVE, base.TAG_API_WITH_BACKEND)
-    def test_delete_metadata_of_other_tenants_public_share(self):
-        self.assertRaises(lib_exc.Forbidden,
-                          self.alt_shares_v2_client.delete_metadata,
-                          self.share['id'],
-                          'key')
-
 
 class SharesAPIOnlyNegativeTest(base.BaseSharesMixedTest):
 
@@ -201,7 +146,13 @@
         super(SharesAPIOnlyNegativeTest, cls).resource_setup()
         # create share_type
         cls.share_type = cls._create_share_type()
+        cls.share_type_min_2_max_5 = cls._create_share_type(
+            specs={
+                'provisioning:max_share_size': int(CONF.share.share_size) + 4,
+                'provisioning:min_share_size': int(CONF.share.share_size) + 1
+            })
         cls.share_type_id = cls.share_type['id']
+        cls.share_type_min_2_max_5_id = cls.share_type_min_2_max_5['id']
 
     @decorators.idempotent_id('75837f93-8c2c-40a4-bb9e-d76c53db07c7')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
@@ -331,3 +282,23 @@
         # Should not be able to delete share when empty ID is passed
         self.assertRaises(lib_exc.NotFound,
                           self.shares_client.delete_share, '')
+
+    @base.skip_if_microversion_lt("2.61")
+    @decorators.idempotent_id('b8097d56-067e-4d7c-8401-31bc7021fe86')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
+    def test_create_share_size_greater_than_specified_in_share_type(self):
+        # Should not be able to create share if size too large
+        self.assertRaises(lib_exc.BadRequest,
+                          self.create_share,
+                          size=int(CONF.share.share_size) + 5,
+                          share_type_id=self.share_type_min_2_max_5_id)
+
+    @base.skip_if_microversion_lt("2.61")
+    @decorators.idempotent_id('b8097d56-067e-4d7c-8401-31bc7021fe87')
+    @tc.attr(base.TAG_NEGATIVE, base.TAG_API)
+    def test_create_share_size_less_than_specified_in_share_type(self):
+        # Should not be able to create share if size too small
+        self.assertRaises(lib_exc.BadRequest,
+                          self.create_share,
+                          size=int(CONF.share.share_size),
+                          share_type_id=self.share_type_min_2_max_5_id)
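
A quick word on the sizing arithmetic behind the two tests above: the new
share type pins provisioning:min_share_size to CONF.share.share_size + 1 and
provisioning:max_share_size to CONF.share.share_size + 4. Assuming the usual
default of share_size = 1 (which the share_type_min_2_max_5 name implies,
though deployments can configure a different base size), the allowed range is
2 through 5, so the first test requests size 6 (above the maximum) and the
second requests size 1 (below the minimum), and both expect BadRequest once
microversion 2.61 is available.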
diff --git a/manila_tempest_tests/tests/api/test_snapshot_rules.py b/manila_tempest_tests/tests/api/test_snapshot_rules.py
index e793843..615c68d 100644
--- a/manila_tempest_tests/tests/api/test_snapshot_rules.py
+++ b/manila_tempest_tests/tests/api/test_snapshot_rules.py
@@ -19,6 +19,7 @@
 from tempest.lib import decorators
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests import utils
 
@@ -51,14 +52,16 @@
         for key in ('deleted', 'deleted_at', 'instance_mappings'):
             self.assertNotIn(key, list(six.iterkeys(rule)))
 
-        self.shares_v2_client.wait_for_snapshot_access_rule_status(
-            self.snapshot['id'], rule['id'])
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.snapshot['id'], 'active',
+            resource_name='snapshot_access_rule', rule_id=rule['id'],
+            status_attr='state')
 
         # delete rule and wait for deletion
         self.shares_v2_client.delete_snapshot_access_rule(self.snapshot['id'],
                                                           rule['id'])
-        self.shares_v2_client.wait_for_snapshot_access_rule_deletion(
-            self.snapshot['id'], rule['id'])
+        waiters.wait_for_snapshot_access_rule_deletion(
+            self.shares_v2_client, self.snapshot['id'], rule['id'])
 
 
 @ddt.ddt
diff --git a/manila_tempest_tests/tests/api/test_snapshot_rules_negative.py b/manila_tempest_tests/tests/api/test_snapshot_rules_negative.py
index 92bd639..8b3f2eb 100644
--- a/manila_tempest_tests/tests/api/test_snapshot_rules_negative.py
+++ b/manila_tempest_tests/tests/api/test_snapshot_rules_negative.py
@@ -19,6 +19,7 @@
 from tempest.lib import exceptions as lib_exc
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests.tests.api import test_snapshot_rules
 from manila_tempest_tests import utils
@@ -92,8 +93,10 @@
         rule = self.shares_v2_client.create_snapshot_access_rule(
             self.snap['id'], access_type, access_to)
 
-        self.shares_v2_client.wait_for_snapshot_access_rule_status(
-            self.snap['id'], rule['id'])
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, self.snap['id'], 'active',
+            resource_name='snapshot_access_rule', rule_id=rule['id'],
+            status_attr='state')
 
         # try create duplicate of rule
         self.assertRaises(lib_exc.BadRequest,
@@ -113,8 +116,8 @@
         # delete rule and wait for deletion
         self.shares_v2_client.delete_snapshot_access_rule(self.snap['id'],
                                                           rule['id'])
-        self.shares_v2_client.wait_for_snapshot_access_rule_deletion(
-            self.snap['id'], rule['id'])
+        waiters.wait_for_snapshot_access_rule_deletion(
+            self.shares_v2_client, self.snap['id'], rule['id'])
 
         self.assertRaises(lib_exc.NotFound,
                           self.shares_v2_client.delete_snapshot_access_rule,
diff --git a/manila_tempest_tests/tests/scenario/manager_share.py b/manila_tempest_tests/tests/scenario/manager_share.py
index 84c87bd..0f55d6e 100644
--- a/manila_tempest_tests/tests/scenario/manager_share.py
+++ b/manila_tempest_tests/tests/scenario/manager_share.py
@@ -26,6 +26,7 @@
 
 from manila_tempest_tests.common import constants
 from manila_tempest_tests.common import remote_client
+from manila_tempest_tests.common import waiters as share_waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests.tests.scenario import manager
 from manila_tempest_tests import utils
@@ -347,12 +348,14 @@
         """
         client = client or self.shares_client
         client.delete_access_rule(share_id, access_rule_id)
-        self.shares_v2_client.wait_for_share_status(
-            share_id, "active", status_attr='access_rules_status')
+        share_waiters.wait_for_resource_status(
+            self.shares_v2_client, share_id, "active",
+            status_attr='access_rules_status')
 
-    def provide_access_to_auxiliary_instance(self, instance, share=None,
-                                             snapshot=None, access_level='rw',
-                                             client=None):
+    def _provide_access_to_client_identified_by_ip(self, instance, share=None,
+                                                   snapshot=None,
+                                                   access_level='rw',
+                                                   client=None):
         share = share or self.share
         client = client or self.shares_v2_client
         if not CONF.share.multitenancy_enabled:
@@ -375,14 +378,14 @@
                 share['id'], instance=instance, cleanup=False,
                 snapshot=snapshot, access_level=access_level, client=client)
 
-    def provide_access_to_client_identified_by_cephx(self, share=None,
-                                                     access_rule=None,
-                                                     access_level='rw',
-                                                     access_to=None,
-                                                     remote_client=None,
-                                                     locations=None,
-                                                     client=None,
-                                                     oc_size=20971520):
+    def _provide_access_to_client_identified_by_cephx(self, share=None,
+                                                      access_rule=None,
+                                                      access_level='rw',
+                                                      access_to=None,
+                                                      remote_client=None,
+                                                      locations=None,
+                                                      client=None,
+                                                      oc_size=20971520):
         """Provide an access to a client identified by cephx authentication
 
         :param: share: An existing share.
@@ -422,7 +425,7 @@
                 client.update_access_metadata(
                     metadata={"access_to": "{}".format(access_to)},
                     access_id=access_rule['id'])
-        get_access = client.get_access(access_rule['id'])
+        get_access = client.get_access_rule(access_rule['id'])
         # Set 'access_key' and 'access_to' attributes for being use in mount
         # operation.
         setattr(self, 'access_key', get_access['access_key'])
@@ -531,7 +534,8 @@
             self.addCleanup(client.delete_share,
                             share['id'])
 
-        client.wait_for_share_status(share['id'], 'available')
+        share_waiters.wait_for_resource_status(client, share['id'],
+                                               'available')
         return share
 
     def _create_snapshot(self, share_id, client=None, **kwargs):
@@ -540,7 +544,8 @@
         self.addCleanup(
             client.wait_for_resource_deletion, snapshot_id=snapshot['id'])
         self.addCleanup(client.delete_snapshot, snapshot['id'])
-        client.wait_for_snapshot_status(snapshot["id"], "available")
+        share_waiters.wait_for_resource_status(
+            client, snapshot["id"], "available", resource_name='snapshot')
         return snapshot
 
     def _wait_for_share_server_deletion(self, sn_id, client=None):
@@ -590,8 +595,8 @@
         access = client.create_access_rule(share_id, access_type, access_to,
                                            access_level)
 
-        client.wait_for_share_status(
-            share_id, "active", status_attr='access_rules_status')
+        share_waiters.wait_for_resource_status(
+            client, share_id, "active", status_attr='access_rules_status')
 
         if cleanup:
             self.addCleanup(client.delete_access_rule, share_id, access['id'])
@@ -616,8 +621,10 @@
             self.addCleanup(client.delete_snapshot_access_rule,
                             snapshot_id, access['id'])
 
-        client.wait_for_snapshot_access_rule_status(
-            snapshot_id, access['id'])
+        share_waiters.wait_for_resource_status(
+            client, snapshot_id, 'active',
+            resource_name='snapshot_access_rule', rule_id=access['id'],
+            status_attr='state')
 
         return access
 
@@ -642,15 +649,16 @@
             share_id, dest_host, writable=False, preserve_metadata=False,
             nondisruptive=False, preserve_snapshots=False,
             force_host_assisted_migration=force_host_assisted)
-        share = client.wait_for_migration_status(share_id, dest_host, status)
+        share = share_waiters.wait_for_migration_status(
+            client, share_id, dest_host, status)
         return share
 
     def _migration_complete(self, share_id, dest_host, client=None, **kwargs):
         client = client or self.shares_admin_v2_client
         client.migration_complete(share_id, **kwargs)
-        share = client.wait_for_migration_status(
-            share_id, dest_host, constants.TASK_STATE_MIGRATION_SUCCESS,
-            **kwargs)
+        share = share_waiters.wait_for_migration_status(
+            client, share_id, dest_host,
+            constants.TASK_STATE_MIGRATION_SUCCESS, **kwargs)
         return share
 
     def _create_share_type(self, name, is_public=True, **kwargs):
@@ -721,10 +729,67 @@
         return ip, version
 
 
-class BaseShareCEPHFSTest(ShareScenarioTest):
+class BaseShareScenarioNFSTest(ShareScenarioTest):
+    protocol = "nfs"
+
+    @classmethod
+    def skip_checks(cls):
+        super(BaseShareScenarioNFSTest, cls).skip_checks()
+        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
+            message = ("%s tests for access rules other than IP are disabled" %
+                       cls.protocol)
+            raise cls.skipException(message)
+
+    def allow_access(self, access_level='rw', **kwargs):
+        snapshot = kwargs.get('snapshot')
+        return self._provide_access_to_client_identified_by_ip(
+            instance=kwargs['instance'], access_level=access_level,
+            snapshot=snapshot)
+
+    def mount_share(self, location, ssh_client, target_dir=None):
+
+        self.validate_ping_to_export_location(location, ssh_client)
+
+        target_dir = target_dir or "/mnt"
+        ssh_client.exec_command(
+            "sudo mount -vt nfs \"%s\" %s" % (location, target_dir)
+        )
+
+
+class BaseShareScenarioCIFSTest(ShareScenarioTest):
+    protocol = 'cifs'
+
+    @classmethod
+    def skip_checks(cls):
+        super(BaseShareScenarioCIFSTest, cls).skip_checks()
+        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
+            message = ("%s tests for access rules other than IP are disabled" %
+                       cls.protocol)
+            raise cls.skipException(message)
+
+    def allow_access(self, access_level='rw', **kwargs):
+        snapshot = kwargs.get('snapshot')
+        return self._provide_access_to_client_identified_by_ip(
+            instance=kwargs['instance'],
+            snapshot=snapshot,
+            access_level=access_level)
+
+    def mount_share(self, location, ssh_client, target_dir=None):
+
+        self.validate_ping_to_export_location(location, ssh_client)
+
+        location = location.replace("\\", "/")
+        target_dir = target_dir or "/mnt"
+        ssh_client.exec_command(
+            "sudo mount.cifs \"%s\" %s -o guest" % (location, target_dir)
+        )
+
+
+class BaseShareScenarioCEPHFSTest(ShareScenarioTest):
+    protocol = 'cephfs'
 
     def allow_access(self, access_level='rw', access_rule=None, **kwargs):
-        return self.provide_access_to_client_identified_by_cephx(
+        return self._provide_access_to_client_identified_by_cephx(
             remote_client=kwargs['remote_client'],
             locations=kwargs['locations'], access_level=access_level,
             access_rule=access_rule)
@@ -755,4 +820,5 @@
         if getattr(self, 'mount_client', None):
             return remote_client.exec_command(
                 "sudo fusermount -uz %s" % target_dir)
-        super(BaseShareCEPHFSTest, self).unmount_share(remote_client)
+        super(BaseShareScenarioCEPHFSTest, self).unmount_share(
+            remote_client, target_dir=target_dir)
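
A minimal sketch of how a scenario test is expected to sit on top of these new
protocol base classes; the class and test names are illustrative only, and the
helper calls mirror the ones used in test_share_basic_ops.py below:

    class ExampleNFSScenarioTest(BaseShareScenarioNFSTest):

        def test_mount_share_over_nfs(self):
            instance = self.boot_instance(wait_until="BUILD")
            self.create_share()
            location = self.get_user_export_locations(self.share)[0]
            instance = self.wait_for_active_instance(instance["id"])
            remote_client = self.init_remote_client(instance)
            # For NFS/CIFS, allow_access() delegates to the IP-based helper.
            self.allow_access(instance=instance,
                              remote_client=remote_client,
                              locations=location)
            self.mount_share(location, remote_client)
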
diff --git a/manila_tempest_tests/tests/scenario/test_share_basic_ops.py b/manila_tempest_tests/tests/scenario/test_share_basic_ops.py
index 70e6a30..9f0ec70 100644
--- a/manila_tempest_tests/tests/scenario/test_share_basic_ops.py
+++ b/manila_tempest_tests/tests/scenario/test_share_basic_ops.py
@@ -133,7 +133,7 @@
 
     @decorators.idempotent_id('15d42949-545e-4ad8-b06e-bb2556c54375')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.29")
+    @utils.skip_if_microversion_lt("2.29")
     @testtools.skipUnless(CONF.share.run_host_assisted_migration_tests or
                           CONF.share.run_driver_assisted_migration_tests,
                           "Share migration tests are disabled.")
@@ -165,7 +165,7 @@
 
         instance = self.boot_instance(wait_until="BUILD")
         self.create_share()
-        exports = self.get_user_export_locations(self.share)
+        export_location = self.get_user_export_locations(self.share)[0]
         instance = self.wait_for_active_instance(instance["id"])
         self.share = self.shares_admin_v2_client.get_share(self.share['id'])
 
@@ -181,9 +181,12 @@
         dest_pool = dest_pool['name']
 
         remote_client = self.init_remote_client(instance)
-        self.provide_access_to_auxiliary_instance(instance)
 
-        self.mount_share(exports[0], remote_client)
+        self.allow_access(instance=instance,
+                          remote_client=remote_client,
+                          locations=export_location)
+
+        self.mount_share(export_location, remote_client)
 
         remote_client.exec_command("sudo mkdir -p /mnt/f1")
         remote_client.exec_command("sudo mkdir -p /mnt/f2")
@@ -250,6 +253,10 @@
 
         # 2 - Create share S1, ok, created
         parent_share = self.create_share()
+        parent_share_export_location = self.get_user_export_locations(
+            parent_share)[0]
+
+        # Create a client User Virtual Machine
         instance = self.wait_for_active_instance(instance["id"])
         self.addCleanup(self.servers_client.delete_server, instance['id'])
 
@@ -257,14 +264,18 @@
         remote_client = self.init_remote_client(instance)
 
         # 4 - Provide RW access to S1, ok, provided
-        self.provide_access_to_auxiliary_instance(instance, parent_share)
+        self.allow_access(instance=instance,
+                          remote_client=remote_client,
+                          locations=parent_share_export_location)
 
         # 5 - Try mount S1 to UVM, ok, mounted
-        user_export_location = self.get_user_export_locations(parent_share)[0]
+
         parent_share_dir = "/mnt/parent"
         remote_client.exec_command("sudo mkdir -p %s" % parent_share_dir)
 
-        self.mount_share(user_export_location, remote_client, parent_share_dir)
+        self.mount_share(parent_share_export_location,
+                         remote_client,
+                         parent_share_dir)
         self.addCleanup(self.unmount_share, remote_client, parent_share_dir)
 
         # 6 - Create "file1", ok, created
@@ -282,21 +293,26 @@
 
         # 10 - Try mount S2 - fail, access denied. We test that child share
         #      did not get access rules from parent share.
-        user_export_location = self.get_user_export_locations(child_share)[0]
+        child_share_export_location = self.get_user_export_locations(
+            child_share)[0]
         child_share_dir = "/mnt/child"
         remote_client.exec_command("sudo mkdir -p %s" % child_share_dir)
 
         self.assertRaises(
             exceptions.SSHExecCommandFailed,
             self.mount_share,
-            user_export_location, remote_client, child_share_dir,
+            child_share_export_location, remote_client, child_share_dir,
         )
 
         # 11 - Provide RW access to S2, ok, provided
-        self.provide_access_to_auxiliary_instance(instance, child_share)
+        self.allow_access(instance=instance,
+                          remote_client=remote_client,
+                          locations=child_share_export_location)
 
         # 12 - Try mount S2, ok, mounted
-        self.mount_share(user_export_location, remote_client, child_share_dir)
+        self.mount_share(child_share_export_location,
+                         remote_client,
+                         child_share_dir)
         self.addCleanup(self.unmount_share, remote_client, child_share_dir)
 
         # 13 - List files on S2, only "file1" exists
@@ -324,7 +340,7 @@
 
     @decorators.idempotent_id('c98e6876-3a4f-40e8-8b4f-023c94c242c3')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
-    @base.skip_if_microversion_lt("2.32")
+    @utils.skip_if_microversion_lt("2.32")
     @testtools.skipUnless(CONF.share.run_mount_snapshot_tests,
                           'Mountable snapshots tests are disabled.')
     @testtools.skipUnless(CONF.share.run_snapshot_tests,
@@ -335,6 +351,9 @@
 
         # 2 - Create share S1, ok, created
         parent_share = self.create_share()
+        user_export_location = self.get_user_export_locations(parent_share)[0]
+
+        # Create a client User Virtual Machine
         instance = self.wait_for_active_instance(instance["id"])
         self.addCleanup(self.servers_client.delete_server, instance['id'])
 
@@ -342,10 +361,11 @@
         remote_client = self.init_remote_client(instance)
 
         # 4 - Provide RW access to S1, ok, provided
-        self.provide_access_to_auxiliary_instance(instance, parent_share)
+        self.allow_access(instance=instance,
+                          remote_client=remote_client,
+                          locations=user_export_location)
 
         # 5 - Try mount S1 to UVM, ok, mounted
-        user_export_location = self.get_user_export_locations(parent_share)[0]
         parent_share_dir = "/mnt/parent"
         snapshot_dir = "/mnt/snapshot_dir"
         remote_client.exec_command("sudo mkdir -p %s" % parent_share_dir)
@@ -359,18 +379,21 @@
 
         # 7 - Create snapshot SS1 from S1, ok, created
         snapshot = self._create_snapshot(parent_share['id'])
+        snapshot_export_location = self.get_user_export_locations(
+            snapshot=snapshot)[0]
 
         # 8 - Create "file2" in share S1 - ok, created. We expect that
         # snapshot will not contain any data created after snapshot creation.
         remote_client.exec_command("sudo touch %s/file2" % parent_share_dir)
 
         # 9 - Allow access to SS1
-        self.provide_access_to_auxiliary_instance(instance, snapshot=snapshot)
+        self.allow_access(instance=instance,
+                          snapshot=snapshot,
+                          remote_client=remote_client,
+                          locations=snapshot_export_location)
 
         # 10 - Mount SS1
-        user_export_location = self.get_user_export_locations(
-            snapshot=snapshot)[0]
-        self.mount_share(user_export_location, remote_client, snapshot_dir)
+        self.mount_share(snapshot_export_location, remote_client, snapshot_dir)
         self.addCleanup(self.unmount_share, remote_client, snapshot_dir)
 
         # 11 - List files on SS1, only "file1" exists
@@ -387,54 +410,13 @@
             "sudo touch %s/file3" % snapshot_dir)
 
 
-class TestShareBasicOpsNFS(ShareBasicOpsBase):
-    protocol = "nfs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(TestShareBasicOpsNFS, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount -vt nfs \"%s\" %s" % (location, target_dir))
+class TestShareBasicOpsNFS(manager.BaseShareScenarioNFSTest,
+                           ShareBasicOpsBase):
+    pass
 
 
-class TestShareBasicOpsCIFS(ShareBasicOpsBase):
-    protocol = "cifs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(TestShareBasicOpsCIFS, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        location = location.replace("\\", "/")
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount.cifs \"%s\" %s -o guest" % (location, target_dir)
-        )
+class TestShareBasicOpsCIFS(manager.BaseShareScenarioCIFSTest,
+                            ShareBasicOpsBase):
 
     @decorators.idempotent_id('4344a47a-d316-496b-97a4-12a59297950a')
     @tc.attr(base.TAG_NEGATIVE, base.TAG_BACKEND)
@@ -456,26 +438,36 @@
         raise self.skipException(msg)
 
 
-class TestShareBasicOpsCEPHFS(ShareBasicOpsBase, manager.BaseShareCEPHFSTest):
-    protocol = "cephfs"
-
+class TestBaseShareBasicOpsScenarioCEPHFS(manager.BaseShareScenarioCEPHFSTest,
+                                          ShareBasicOpsBase):
     @decorators.idempotent_id('9fb12879-45b3-4042-acac-82be338dbde1')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     def test_mount_share_one_vm_with_ceph_fuse_client(self):
         self.mount_client = 'fuse'
-        super(TestShareBasicOpsCEPHFS, self).test_mount_share_one_vm()
+        super(TestBaseShareBasicOpsScenarioCEPHFS,
+              self).test_mount_share_one_vm()
 
     @decorators.idempotent_id('a2a70b94-f5fc-438a-9dfa-53aa60ee3949')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     def test_write_with_ro_access_with_ceph_fuse_client(self):
         self.mount_client = 'fuse'
-        super(TestShareBasicOpsCEPHFS, self).test_write_with_ro_access()
+        super(TestBaseShareBasicOpsScenarioCEPHFS,
+              self).test_write_with_ro_access()
 
     @decorators.idempotent_id('c247f51f-0ffc-4a4f-894c-781647619faf')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     def test_read_write_two_vms_with_ceph_fuse_client(self):
         self.mount_client = 'fuse'
-        super(TestShareBasicOpsCEPHFS, self).test_read_write_two_vms()
+        super(TestBaseShareBasicOpsScenarioCEPHFS,
+              self).test_read_write_two_vms()
+
+    @decorators.idempotent_id('5bd64c46-05f4-4891-a08f-e146d1a76437')
+    @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
+    def test_write_data_to_share_created_from_snapshot_with_ceph_fuse_client(
+            self):
+        self.mount_client = 'fuse'
+        super(TestBaseShareBasicOpsScenarioCEPHFS,
+              self).test_write_data_to_share_created_from_snapshot()
 
 
 class TestShareBasicOpsNFSIPv6(TestShareBasicOpsNFS):
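The per-protocol helpers deleted above (allow_access, mount_share) are consolidated into shared scenario mixins in manager_share.py. Below is a minimal sketch of such a mixin, reconstructed from the removed NFS helpers; the real manager.BaseShareScenarioNFSTest may differ in naming and signatures, and the class name here is illustrative only.

    # Hedged sketch of a protocol mixin along the lines of
    # manager.BaseShareScenarioNFSTest, reconstructed from the helpers
    # removed above; the real mixin may differ in details.
    class NFSScenarioMixinSketch(object):
        protocol = "nfs"

        def allow_access(self, access_level='rw', **kwargs):
            # The new call sites pass instance=, remote_client= and
            # locations=; the mixin is expected to turn that into an
            # access rule for the auxiliary instance.
            return self.provide_access_to_auxiliary_instance(
                instance=kwargs['instance'], access_level=access_level)

        def mount_share(self, location, remote_client, target_dir=None):
            # Check the export is reachable from the guest, then mount it.
            self.validate_ping_to_export_location(location, remote_client)
            target_dir = target_dir or "/mnt"
            remote_client.exec_command(
                'sudo mount -vt nfs "%s" %s' % (location, target_dir))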
diff --git a/manila_tempest_tests/tests/scenario/test_share_extend.py b/manila_tempest_tests/tests/scenario/test_share_extend.py
index df77990..76cfd2a 100644
--- a/manila_tempest_tests/tests/scenario/test_share_extend.py
+++ b/manila_tempest_tests/tests/scenario/test_share_extend.py
@@ -20,6 +20,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests.tests.scenario import manager_share as manager
 
@@ -95,8 +96,8 @@
         extended_share_size = default_share_size + 1
         self.shares_v2_client.extend_share(share["id"],
                                            new_size=extended_share_size)
-        self.shares_v2_client.wait_for_share_status(share["id"],
-                                                    constants.STATUS_AVAILABLE)
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share["id"], constants.STATUS_AVAILABLE)
         share = self.shares_v2_client.get_share(share["id"])
         self.assertEqual(extended_share_size, int(share["size"]))
 
@@ -145,65 +146,23 @@
                 raise
 
 
-class TestShareExtendNFS(ShareExtendBase):
-    protocol = "nfs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(ShareExtendBase, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount -vt nfs \"%s\" %s" % (location, target_dir)
-        )
+class TestShareExtendNFS(manager.BaseShareScenarioNFSTest, ShareExtendBase):
+    pass
 
 
-class TestShareExtendCIFS(ShareExtendBase):
-    protocol = "cifs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(ShareExtendBase, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        location = location.replace("\\", "/")
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount.cifs \"%s\" %s -o guest" % (location, target_dir)
-        )
+class TestShareExtendCIFS(manager.BaseShareScenarioCIFSTest, ShareExtendBase):
+    pass
 
 
-class TestShareExtendCEPHFS(ShareExtendBase, manager.BaseShareCEPHFSTest):
-    protocol = "cephfs"
+class TestBaseShareExtendScenarioCEPHFS(manager.BaseShareScenarioCEPHFSTest,
+                                        ShareExtendBase):
 
     @decorators.idempotent_id('9ca1e4a9-23e3-4da6-a63e-46e7919335e0')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     def test_create_extend_and_write_with_ceph_fuse_client(self):
         self.mount_client = 'fuse'
-        super(TestShareExtendCEPHFS, self).test_create_extend_and_write()
+        super(TestBaseShareExtendScenarioCEPHFS,
+              self).test_create_extend_and_write()
 
 
 class TestShareExtendNFSIPv6(TestShareExtendNFS):
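Share-status polling now goes through the common waiters module instead of the per-client wait_for_share_status methods. Below is a minimal usage sketch, assuming only the call patterns visible in this patch (a single status after extend_share above, and a list of acceptable statuses in the shrink test below); the helper function names are illustrative, not part of the plugin.

    # Hedged usage sketch of the shared waiter, assuming only the call
    # patterns visible in this patch.
    from manila_tempest_tests.common import constants
    from manila_tempest_tests.common import waiters

    def wait_until_available(client, share_id):
        # Single-status form, as used after extend_share() above.
        waiters.wait_for_resource_status(client, share_id,
                                         constants.STATUS_AVAILABLE)

    def wait_until_shrink_settles(client, share_id):
        # A list of statuses is also accepted: the wait ends when the
        # share reaches either state (see the shrink test below).
        waiters.wait_for_resource_status(
            client, share_id,
            ['shrinking_possible_data_loss_error', 'available'])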
diff --git a/manila_tempest_tests/tests/scenario/test_share_manage_unmanage.py b/manila_tempest_tests/tests/scenario/test_share_manage_unmanage.py
index 7da1974..aa164ea 100644
--- a/manila_tempest_tests/tests/scenario/test_share_manage_unmanage.py
+++ b/manila_tempest_tests/tests/scenario/test_share_manage_unmanage.py
@@ -18,6 +18,7 @@
 import testtools
 from testtools import testcase as tc
 
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests.tests.scenario import manager_share as manager
 from manila_tempest_tests import utils
@@ -82,7 +83,7 @@
         remote_client = self.init_remote_client(instance)
 
         LOG.debug('Step 4 - provide access to instance')
-        self.provide_access_to_auxiliary_instance(instance, share=share)
+        self._provide_access_to_client_identified_by_ip(instance, share=share)
 
         if utils.is_microversion_lt(CONF.share.max_api_microversion, "2.9"):
             locations = share['export_locations']
@@ -131,11 +132,11 @@
             share['share_proto'],
             locations[0],
             share_type['id'])
-        self.shares_admin_v2_client.wait_for_share_status(
-            managed_share['id'], 'available')
+        waiters.wait_for_resource_status(
+            self.shares_admin_v2_client, managed_share['id'], 'available')
 
         LOG.debug('Step 11 - grant access again')
-        self.provide_access_to_auxiliary_instance(
+        self._provide_access_to_client_identified_by_ip(
             instance,
             share=managed_share,
             client=self.shares_admin_v2_client)
@@ -165,8 +166,8 @@
             share['share_proto'],
             locations[0],
             share_type['id'])
-        self.shares_admin_v2_client.wait_for_share_status(
-            remanaged_share['id'], 'manage_error')
+        waiters.wait_for_resource_status(
+            self.shares_admin_v2_client, remanaged_share['id'], 'manage_error')
 
         self.shares_admin_v2_client.reset_state(remanaged_share['id'])
 
@@ -176,31 +177,14 @@
             share_id=remanaged_share['id'])
 
 
-class ShareManageUnmanageNFS(ShareManageUnmanageBase):
-    protocol = "nfs"
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount -vt nfs \"%s\" %s" % (location, target_dir)
-        )
+class ShareManageUnmanageNFS(manager.BaseShareScenarioNFSTest,
+                             ShareManageUnmanageBase):
+    pass
 
 
-class ShareManageUnmanageCIFS(ShareManageUnmanageBase):
-    protocol = "cifs"
-
-    def mount_share(self, location, remote_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, remote_client)
-
-        location = location.replace("\\", "/")
-        target_dir = target_dir or "/mnt"
-        remote_client.exec_command(
-            "sudo mount.cifs \"%s\" %s -o guest" % (location, target_dir)
-        )
+class ShareManageUnmanageCIFS(manager.BaseShareScenarioCIFSTest,
+                              ShareManageUnmanageBase):
+    pass
 
 
 class ShareManageUnmanageNFSIPv6(ShareManageUnmanageNFS):
diff --git a/manila_tempest_tests/tests/scenario/test_share_shrink.py b/manila_tempest_tests/tests/scenario/test_share_shrink.py
index 3e498f8..241dbb3 100644
--- a/manila_tempest_tests/tests/scenario/test_share_shrink.py
+++ b/manila_tempest_tests/tests/scenario/test_share_shrink.py
@@ -21,6 +21,7 @@
 from testtools import testcase as tc
 
 from manila_tempest_tests.common import constants
+from manila_tempest_tests.common import waiters
 from manila_tempest_tests.tests.api import base
 from manila_tempest_tests.tests.scenario import manager_share as manager
 
@@ -86,8 +87,9 @@
         LOG.debug('Step 8 - try update size, shrink and wait')
         self.shares_v2_client.shrink_share(share['id'],
                                            new_size=default_share_size)
-        self.shares_v2_client.wait_for_share_status(
-            share['id'], ['shrinking_possible_data_loss_error', 'available'])
+        waiters.wait_for_resource_status(
+            self.shares_v2_client, share['id'],
+            ['shrinking_possible_data_loss_error', 'available'])
 
         share = self.shares_v2_client.get_share(share["id"])
 
@@ -160,65 +162,22 @@
                 raise exceptions.TimeoutException(message)
 
 
-class TestShareShrinkNFS(ShareShrinkBase):
-    protocol = "nfs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(ShareShrinkBase, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, ssh_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, ssh_client)
-
-        target_dir = target_dir or "/mnt"
-        ssh_client.exec_command(
-            "sudo mount -vt nfs \"%s\" %s" % (location, target_dir)
-        )
+class TestShareShrinkNFS(manager.BaseShareScenarioNFSTest, ShareShrinkBase):
+    pass
 
 
-class TestShareShrinkCIFS(ShareShrinkBase):
-    protocol = "cifs"
-
-    @classmethod
-    def skip_checks(cls):
-        super(ShareShrinkBase, cls).skip_checks()
-        if cls.protocol not in CONF.share.enable_ip_rules_for_protocols:
-            message = ("%s tests for access rules other than IP are disabled" %
-                       cls.protocol)
-            raise cls.skipException(message)
-
-    def allow_access(self, access_level='rw', **kwargs):
-        return self.provide_access_to_auxiliary_instance(
-            instance=kwargs['instance'], access_level=access_level)
-
-    def mount_share(self, location, ssh_client, target_dir=None):
-
-        self.validate_ping_to_export_location(location, ssh_client)
-
-        location = location.replace("\\", "/")
-        target_dir = target_dir or "/mnt"
-        ssh_client.exec_command(
-            "sudo mount.cifs \"%s\" %s -o guest" % (location, target_dir)
-        )
+class TestShareShrinkCIFS(manager.BaseShareScenarioCIFSTest, ShareShrinkBase):
+    pass
 
 
-class TestShareShrinkCEPHFS(ShareShrinkBase, manager.BaseShareCEPHFSTest):
-    protocol = "cephfs"
-
+class TestBaseShareShrinkScenarioCEPHFS(manager.BaseShareScenarioCEPHFSTest,
+                                        ShareShrinkBase):
     @decorators.idempotent_id('7fb324ed-7479-4bd9-b022-b3739dee9bcb')
     @tc.attr(base.TAG_POSITIVE, base.TAG_BACKEND)
     def test_create_shrink_and_write_with_ceph_fuse_client(self):
         self.mount_client = 'fuse'
-        super(TestShareShrinkCEPHFS, self).test_create_shrink_and_write()
+        super(TestBaseShareShrinkScenarioCEPHFS,
+              self).test_create_shrink_and_write()
 
 
 class TestShareShrinkNFSIPv6(TestShareShrinkNFS):
diff --git a/test-requirements.txt b/test-requirements.txt
index cf025c3..e422e93 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -6,11 +6,8 @@
 
 coverage!=4.4,>=4.0 # Apache-2.0
 python-subunit>=1.0.0 # Apache-2.0/BSD
-sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
 oslotest>=3.2.0 # Apache-2.0
 stestr>=1.0.0 # Apache-2.0
 testtools>=2.2.0 # MIT
-openstackdocstheme>=1.31.2 # Apache-2.0
 # releasenotes
-reno>=2.5.0 # Apache-2.0
 flake8-import-order
diff --git a/tox.ini b/tox.ini
index 5cb26a3..153ebbf 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-minversion = 2.0
+minversion = 3.1.1
 envlist = py3,pypy,pep8
 skipsdist = True
 
@@ -31,9 +31,12 @@
 commands = {posargs}
 
 [testenv:docs]
+deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
+       -r{toxinidir}/doc/requirements.txt
 commands = sphinx-build -W -b html doc/source doc/build/html
 
 [testenv:releasenotes]
+deps = {[testenv:docs]deps}
 commands =
   sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
 
diff --git a/zuul.d/manila-tempest-jobs.yaml b/zuul.d/manila-tempest-jobs.yaml
index 0727b43..7d7b08e 100644
--- a/zuul.d/manila-tempest-jobs.yaml
+++ b/zuul.d/manila-tempest-jobs.yaml
@@ -317,7 +317,6 @@
               enable_cephx_rules_for_protocols: cephfs
               run_share_group_tests: false
               multitenancy_enabled: false
-              capability_create_share_from_snapshot_support: false
               suppress_errors_in_cleanup: true
               backend_names: CEPHFSNATIVE
               capability_storage_protocol: CEPHFS
@@ -329,16 +328,11 @@
     name: manila-tempest-plugin-cephfs-nfs
     description: Test CephFS NFS (DHSS=False)
     parent: manila-tempest-plugin-base
-    # TODO(gmann): Remove the below nodeset setting to Bionic once
-    # https://bugs.launchpad.net/manila/+bug/1896672 is fixed
-    # Once nodeset is removed form here then devstack base job
-    # will automatically run this job on Ubuntu Focal nodeset from
-    # Victoria gate onwards.
-    nodeset: openstack-single-node-bionic
     required-projects:
       - openstack/devstack-plugin-ceph
       - openstack/neutron-dynamic-routing
     vars:
+      tempest_concurrency: 2
       devstack_plugins:
         devstack-plugin-ceph: https://opendev.org/openstack/devstack-plugin-ceph
         neutron-dynamic-routing: https://opendev.org/openstack/neutron-dynamic-routing
@@ -373,7 +367,6 @@
               backend_names: CEPHFSNFS
               enable_protocols: nfs
               capability_storage_protocol: NFS
-              capability_create_share_from_snapshot_support: false
               image_password: manila
 
 - job: