Merge "Fix create_security_group_rule to work with python 2.6"
diff --git a/requirements.txt b/requirements.txt
index 19d6e0b..df9951d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@
 anyjson
 nose
 httplib2>=0.7.0
-testtools>=0.9.29
+testtools>=0.9.32
 lxml
 boto>=2.2.1
 paramiko
diff --git a/tempest/api/compute/base.py b/tempest/api/compute/base.py
index 0fa5a84..abc5899 100644
--- a/tempest/api/compute/base.py
+++ b/tempest/api/compute/base.py
@@ -20,6 +20,7 @@
 from tempest.api import compute
 from tempest import clients
 from tempest.common import log as logging
+from tempest.common.utils.data_utils import parse_image_id
 from tempest.common.utils.data_utils import rand_name
 from tempest import exceptions
 import tempest.test
@@ -73,6 +74,7 @@
         cls.flavor_ref = cls.config.compute.flavor_ref
         cls.flavor_ref_alt = cls.config.compute.flavor_ref_alt
         cls.servers = []
+        cls.images = []
 
         cls.servers_client_v3_auth = os.servers_client_v3_auth
 
@@ -174,7 +176,18 @@
                 pass
 
     @classmethod
+    def clear_images(cls):
+        for image_id in cls.images:
+            try:
+                cls.images_client.delete_image(image_id)
+            except Exception as exc:
+                LOG.info('Exception raised deleting image %s', image_id)
+                LOG.exception(exc)
+                pass
+
+    @classmethod
     def tearDownClass(cls):
+        cls.clear_images()
         cls.clear_servers()
         cls.clear_isolated_creds()
 
@@ -206,6 +219,25 @@
 
         return resp, body
 
+    @classmethod
+    def create_image_from_server(cls, server_id, **kwargs):
+        """Wrapper utility that returns a test server."""
+        name = rand_name(cls.__name__ + "-image")
+        if 'name' in kwargs:
+            name = kwargs.pop('name')
+
+        resp, image = cls.images_client.create_image(
+            server_id, name)
+        image_id = parse_image_id(resp['location'])
+        cls.images.append(image_id)
+
+        if 'wait_until' in kwargs:
+            cls.images_client.wait_for_image_status(image_id,
+                                                    kwargs['wait_until'])
+            resp, image = cls.images_client.get_image(image_id)
+
+        return resp, image
+
     def wait_for(self, condition):
         """Repeatedly calls condition() until a timeout."""
         start_time = int(time.time())
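
As a rough usage sketch (not part of the change itself): a test class built on the
new helper would only need something like the following, with the image id recorded
in cls.images so that clear_images() in tearDownClass() removes it automatically.
The class name and image name below are illustrative.

    class ImageSnapshotSampleTest(base.BaseComputeTest):
        _interface = 'json'

        @classmethod
        def setUpClass(cls):
            super(ImageSnapshotSampleTest, cls).setUpClass()
            # Boot a server the same way the filter tests below do
            resp, cls.server = cls.create_server()
            cls.servers_client.wait_for_server_status(cls.server['id'], 'ACTIVE')
            # Snapshot it; wait_until defers to wait_for_image_status as in the
            # hunk above, and the image id is appended to cls.images for cleanup
            resp, cls.image = cls.create_image_from_server(
                cls.server['id'], name='sample-snapshot', wait_until='ACTIVE')
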
diff --git a/tempest/api/compute/images/test_list_image_filters.py b/tempest/api/compute/images/test_list_image_filters.py
index 9db28ad..5c6b630 100644
--- a/tempest/api/compute/images/test_list_image_filters.py
+++ b/tempest/api/compute/images/test_list_image_filters.py
@@ -16,12 +16,15 @@
 #    under the License.
 
 from tempest.api.compute import base
+from tempest.common import log as logging
 from tempest.common.utils.data_utils import parse_image_id
-from tempest.common.utils.data_utils import rand_name
 from tempest import exceptions
 from tempest.test import attr
 
 
+LOG = logging.getLogger(__name__)
+
+
 class ListImageFiltersTestJSON(base.BaseComputeTest):
     _interface = 'json'
 
@@ -29,6 +32,7 @@
     def setUpClass(cls):
         super(ListImageFiltersTestJSON, cls).setUpClass()
         cls.client = cls.images_client
+        cls.image_ids = []
 
         try:
             resp, cls.server1 = cls.create_server()
@@ -38,9 +42,7 @@
                                                       'ACTIVE')
 
             # Create images to be used in the filter tests
-            image1_name = rand_name('image')
-            resp, body = cls.client.create_image(cls.server1['id'],
-                                                 image1_name)
+            resp, body = cls.create_image_from_server(cls.server1['id'])
             cls.image1_id = parse_image_id(resp['location'])
             cls.client.wait_for_image_resp_code(cls.image1_id, 200)
             cls.client.wait_for_image_status(cls.image1_id, 'ACTIVE')
@@ -49,35 +51,23 @@
             # Servers have a hidden property for when they are being imaged
             # Performing back-to-back create image calls on a single
             # server will sometimes cause failures
-            image3_name = rand_name('image')
-            resp, body = cls.client.create_image(cls.server2['id'],
-                                                 image3_name)
+            resp, body = cls.create_image_from_server(cls.server2['id'])
             cls.image3_id = parse_image_id(resp['location'])
             cls.client.wait_for_image_resp_code(cls.image3_id, 200)
             cls.client.wait_for_image_status(cls.image3_id, 'ACTIVE')
             resp, cls.image3 = cls.client.get_image(cls.image3_id)
 
-            image2_name = rand_name('image')
-            resp, body = cls.client.create_image(cls.server1['id'],
-                                                 image2_name)
+            resp, body = cls.create_image_from_server(cls.server1['id'])
             cls.image2_id = parse_image_id(resp['location'])
             cls.client.wait_for_image_resp_code(cls.image2_id, 200)
+
             cls.client.wait_for_image_status(cls.image2_id, 'ACTIVE')
             resp, cls.image2 = cls.client.get_image(cls.image2_id)
-        except Exception:
-            cls.clear_servers()
-            cls.client.delete_image(cls.image1_id)
-            cls.client.delete_image(cls.image2_id)
-            cls.client.delete_image(cls.image3_id)
+        except Exception as exc:
+            LOG.exception(exc)
+            cls.tearDownClass()
             raise
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.client.delete_image(cls.image1_id)
-        cls.client.delete_image(cls.image2_id)
-        cls.client.delete_image(cls.image3_id)
-        super(ListImageFiltersTestJSON, cls).tearDownClass()
-
     @attr(type=['negative', 'gate'])
     def test_get_image_not_existing(self):
         # Check raises a NotFound
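
For context on parse_image_id (used above and in base.py): create_image returns the
new image's URI in the response's 'location' header, and the helper extracts the
trailing id segment. Roughly (a sketch of the idea, not the exact tempest
implementation):

    def parse_image_id(image_ref):
        # 'http://<compute>/v2/<tenant>/images/<image_id>' -> '<image_id>'
        return image_ref.rsplit('/', 1)[-1]
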
diff --git a/tempest/api/orchestration/stacks/test_stacks.py b/tempest/api/orchestration/stacks/test_stacks.py
index 8847c08..5fed581 100644
--- a/tempest/api/orchestration/stacks/test_stacks.py
+++ b/tempest/api/orchestration/stacks/test_stacks.py
@@ -45,19 +45,19 @@
 
         # count how many stacks to start with
         resp, body = self.client.list_stacks()
-        stack_count = len(body['stacks'])
 
         # create the stack
         stack_identifier = self.create_stack(
             stack_name, self.empty_template)
+        stack_id = stack_identifier.split('/')[1]
 
         # wait for create complete (with no resources it should be instant)
         self.client.wait_for_stack_status(stack_identifier, 'CREATE_COMPLETE')
 
-        # stack count will increment by 1
+        # check for stack in list
         resp, body = self.client.list_stacks()
-        self.assertEqual(stack_count + 1, len(body['stacks']),
-                         'Expected stack count to increment by 1')
+        list_ids = [stack['id'] for stack in body['stacks']]
+        self.assertIn(stack_id, list_ids)
 
         # fetch the stack
         resp, body = self.client.get_stack(stack_identifier)
@@ -68,7 +68,6 @@
         self.assertEqual('CREATE_COMPLETE', body['stack_status'])
 
         # fetch the stack by id
-        stack_id = stack_identifier.split('/')[1]
         resp, body = self.client.get_stack(stack_id)
         self.assertEqual('CREATE_COMPLETE', body['stack_status'])
 
diff --git a/test-requirements.txt b/test-requirements.txt
index 27851da..3912695 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,8 +3,6 @@
 pyflakes==0.7.2
 flake8==2.0
 hacking>=0.5.3,<0.6
-#
-#TODO(afazekas): ensure pg_config installed
 psycopg2
 # needed for doc build
 sphinx>=1.1.2