Merge "Pass ca_certs to VNC tests" into mcp/caracal
diff --git a/tempest/api/network/test_subnetpools_extensions.py b/tempest/api/network/test_subnetpools_extensions.py
index 689844b..f398062 100644
--- a/tempest/api/network/test_subnetpools_extensions.py
+++ b/tempest/api/network/test_subnetpools_extensions.py
@@ -45,6 +45,9 @@
         if not utils.is_extension_enabled('subnet_allocation', 'network'):
             msg = "subnet_allocation extension not enabled."
             raise cls.skipException(msg)
+        if not utils.is_extension_enabled('default-subnetpools', 'network'):
+            msg = "default-subnetpools extension not enabled."
+            raise cls.skipException(msg)
 
     @decorators.attr(type='smoke')
     @decorators.idempotent_id('62595970-ab1c-4b7f-8fcc-fddfe55e9811')
diff --git a/tempest/api/object_storage/test_container_quotas.py b/tempest/api/object_storage/test_container_quotas.py
index f055d19..d4bf0d7 100644
--- a/tempest/api/object_storage/test_container_quotas.py
+++ b/tempest/api/object_storage/test_container_quotas.py
@@ -88,9 +88,12 @@
         nafter = self._get_bytes_used()
         self.assertEqual(nbefore, nafter)
 
+    # NOTE(vsaienko): the quota implementation on active/active rgw
+    # deployments has no coordination, so quota overflow can happen; this is
+    # an upstream issue we can't fix downstream. Remove the test from smoke.
+    # Related-Prod: PRODX-11581
     @decorators.idempotent_id('3a387039-697a-44fc-a9c0-935de31f426b')
     @utils.requires_ext(extension='container_quotas', service='object')
-    @decorators.attr(type="smoke")
     def test_upload_too_many_objects(self):
         """Attempts to upload many objects that exceeds the count limit."""
         for _ in range(QUOTA_COUNT):
diff --git a/tempest/api/volume/admin/test_volume_retype.py b/tempest/api/volume/admin/test_volume_retype.py
index 7c25f3d..4cb2262 100644
--- a/tempest/api/volume/admin/test_volume_retype.py
+++ b/tempest/api/volume/admin/test_volume_retype.py
@@ -179,10 +179,11 @@
         keys_with_change = ('volume_type',)
 
         # NOTE(vsaienko): with active-active cluster deployment volume
-        # services registered with different hostname.
-        if CONF.volume_feature_enabled.cluster_active_active:
-            keys_with_change += ('os-vol-host-attr:host',)
-        else:
+        # services are registered with different hostnames; since we don't
+        # know which service handles the request, the host may or may not
+        # change. TODO(vsaienko): revisit once the bug below is fixed:
+        # https://bugs.launchpad.net/cinder/+bug/1874414
+        if not CONF.volume_feature_enabled.cluster_active_active:
             keys_with_no_change += ('os-vol-host-attr:host',)
 
         # Check the volume information after the retype
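Note: with this change, os-vol-host-attr:host is only asserted to stay the same on non-active/active deployments; on active/active clusters it is left unchecked entirely. A minimal sketch of how such key lists typically drive the post-retype checks (names here are illustrative, not the test's actual variables):

    # Illustrative only: the two key lists drive opposite assertions on the
    # volume representations fetched before (src) and after (dst) the retype.
    def check_retyped_volume(src, dst, keys_with_change, keys_with_no_change):
        for key in keys_with_no_change:
            assert src[key] == dst[key], "%s unexpectedly changed" % key
        for key in keys_with_change:
            assert src[key] != dst[key], "%s did not change" % key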
diff --git a/tempest/lib/common/rest_client.py b/tempest/lib/common/rest_client.py
index a2f2931..4e1dc59 100644
--- a/tempest/lib/common/rest_client.py
+++ b/tempest/lib/common/rest_client.py
@@ -94,6 +94,7 @@
         self.build_interval = build_interval
         self.build_timeout = build_timeout
         self.trace_requests = trace_requests
+        self.ca_certs = ca_certs
 
         self._skip_path = False
         self.general_header_lc = set(('cache-control', 'connection',
diff --git a/tempest/lib/services/object_storage/object_client.py b/tempest/lib/services/object_storage/object_client.py
index 65e8227..c7ac80f 100644
--- a/tempest/lib/services/object_storage/object_client.py
+++ b/tempest/lib/services/object_storage/object_client.py
@@ -167,11 +167,14 @@
         :param parsed_url: parsed url of the remote location
         """
         context = None
-        # If CONF.identity.disable_ssl_certificate_validation is true,
-        # do not check ssl certification.
-        if self.dscv:
-            context = ssl._create_unverified_context()
         if parsed_url.scheme == 'https':
+            # If CONF.identity.disable_ssl_certificate_validation is true,
+            # do not verify the ssl certificate.
+            if self.dscv:
+                context = ssl._create_unverified_context()
+            else:
+                context = ssl.create_default_context(
+                    cafile=self.ca_certs)
             conn = httplib.HTTPSConnection(parsed_url.netloc,
                                            context=context)
         else:
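Note: the rest_client change above stores the configured CA bundle on the client, and the object client now uses it to build a verifying SSL context instead of always falling back to an unverified one. A minimal standalone sketch of that selection logic (the helper name is hypothetical; dscv and ca_certs mirror the client attributes):

    import ssl

    def build_ssl_context(dscv, ca_certs):
        # Hypothetical helper mirroring _create_connection(): skip
        # verification entirely when certificate validation is disabled,
        # otherwise verify against the configured CA bundle (or the system
        # trust store when ca_certs is None).
        if dscv:
            return ssl._create_unverified_context()
        return ssl.create_default_context(cafile=ca_certs)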
diff --git a/tempest/serial_tests/api/admin/test_aggregates.py b/tempest/serial_tests/api/admin/test_aggregates.py
index cedeec0..ce54957 100644
--- a/tempest/serial_tests/api/admin/test_aggregates.py
+++ b/tempest/serial_tests/api/admin/test_aggregates.py
@@ -222,6 +222,9 @@
     @decorators.idempotent_id('96be03c7-570d-409c-90f8-e4db3c646996')
     def test_aggregate_add_host_create_server_with_az(self):
         """Test adding a host to the given aggregate and creating a server"""
+        if CONF.production:
+            raise self.skipException("Not allowed to run this test "
+                                     "in a production environment")
         self.useFixture(fixtures.LockFixture('availability_zone'))
         az_name = data_utils.rand_name(
             prefix=CONF.resource_name_prefix, name=self.az_name_prefix)
@@ -235,12 +238,20 @@
             if agg['availability_zone']:
                 hosts_in_zone.extend(agg['hosts'])
         hosts = [v for v in self.hosts_available if v not in hosts_in_zone]
-        if not hosts:
+        hosts_available = []
+        for host in hosts:
+            hypervisor_servers = (
+                self.os_admin.hypervisor_client.list_servers_on_hypervisor(
+                    host)["hypervisors"][0].get("servers", None))
+            if not hypervisor_servers:
+                hosts_available.append(host)
+        if not hosts_available:
             raise self.skipException("All hosts are already in other "
-                                     "availability zones, so can't add "
+                                     "availability zones or have running "
+                                     "instances, so can't add "
                                      "host to aggregate. \nAggregates list: "
                                      "%s" % aggregates)
-        host = hosts[0]
+        host = hosts_available[0]
 
         self.client.add_host(aggregate['id'], host=host)
         self.addCleanup(self.client.remove_host, aggregate['id'], host=host)
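Note: the added filter only keeps compute hosts whose hypervisor reports no servers, so moving a host into a new availability zone cannot strand running instances. A standalone sketch of that check (assuming an authenticated hypervisor client; the helper name is illustrative):

    def hosts_without_servers(hypervisor_client, hosts):
        # Keep only hosts whose hypervisor currently reports no servers,
        # mirroring the filter applied before picking a host for the
        # aggregate.
        free_hosts = []
        for host in hosts:
            servers = hypervisor_client.list_servers_on_hypervisor(
                host)["hypervisors"][0].get("servers")
            if not servers:
                free_hosts.append(host)
        return free_hosts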
diff --git a/tempest/serial_tests/scenario/test_aggregates_basic_ops.py b/tempest/serial_tests/scenario/test_aggregates_basic_ops.py
index a831fe5..cc45297 100644
--- a/tempest/serial_tests/scenario/test_aggregates_basic_ops.py
+++ b/tempest/serial_tests/scenario/test_aggregates_basic_ops.py
@@ -63,7 +63,11 @@
         hosts_available = []
         for host in svc_list:
             if (host['state'] == 'up' and host['status'] == 'enabled'):
-                hosts_available.append(host['host'])
+                hypervisor_servers = (
+                    self.os_admin.hypervisor_client.list_servers_on_hypervisor(
+                        host["host"])["hypervisors"][0].get("servers", None))
+                if not hypervisor_servers:
+                    hosts_available.append(host["host"])
         aggregates = self.aggregates_client.list_aggregates()['aggregates']
         hosts_in_zone = []
         for agg in aggregates:
@@ -72,7 +76,8 @@
         hosts = [v for v in hosts_available if v not in hosts_in_zone]
         if not hosts:
             raise self.skipException("All hosts are already in other "
-                                     "availability zones, so can't add "
+                                     "availability zones or have running "
+                                     "instances, so can't add "
                                      "host to aggregate. \nAggregates list: "
                                      "%s" % aggregates)
         return hosts[0]
@@ -120,6 +125,9 @@
     @decorators.attr(type='slow')
     @utils.services('compute')
     def test_aggregate_basic_ops(self):
+        if CONF.production:
+            raise self.skipException("Not allowed to run this test "
+                                     "in a production environment")
         self.useFixture(fixtures.LockFixture('availability_zone'))
         az = 'foo_zone'
         aggregate_name = data_utils.rand_name(