Merge "Add caller to v1 image_client wait_for_image_status timeout exception"
diff --git a/tempest/api/compute/servers/test_create_server.py b/tempest/api/compute/servers/test_create_server.py
index e135eca..279dc51 100644
--- a/tempest/api/compute/servers/test_create_server.py
+++ b/tempest/api/compute/servers/test_create_server.py
@@ -102,6 +102,28 @@
                                                   self.password)
         self.assertTrue(linux_client.hostname_equals_servername(self.name))
 
+    @test.skip_because(bug="1306367", interface="xml")
+    @test.attr(type='gate')
+    def test_create_server_with_scheduler_hint_group(self):
+        # Create a server with the scheduler hint "group".
+        name = data_utils.rand_name('server_group')
+        policies = ['affinity']
+        resp, body = self.client.create_server_group(name=name,
+                                                     policies=policies)
+        self.assertEqual(200, resp.status)
+        group_id = body['id']
+        self.addCleanup(self.client.delete_server_group, group_id)
+
+        hints = {'group': group_id}
+        resp, server = self.create_test_server(sched_hints=hints,
+                                               wait_until='ACTIVE')
+        self.assertEqual(202, resp.status)
+
+        # Check that the server is a member of the group
+        resp, server_group = self.client.get_server_group(group_id)
+        self.assertEqual(200, resp.status)
+        self.assertIn(server['id'], server_group['members'])
+
 
 class ServersWithSpecificFlavorTestJSON(base.BaseV2ComputeAdminTest):
     disk_config = 'AUTO'
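
The new test drives the server-group workflow end to end: create a group with the 'affinity' policy, boot a server that carries the group's id in the 'group' scheduler hint, then confirm the booted server shows up in the group's member list. For reference, an anti-affinity variant would reuse exactly the same client calls; the sketch below is illustrative only and is not part of this change.

    @test.attr(type='gate')
    def test_create_server_with_scheduler_hint_group_anti_affinity(self):
        # Hypothetical counterpart using the 'anti-affinity' policy.
        resp, body = self.client.create_server_group(
            name=data_utils.rand_name('server_group'),
            policies=['anti-affinity'])
        self.assertEqual(200, resp.status)
        group_id = body['id']
        self.addCleanup(self.client.delete_server_group, group_id)

        resp, server = self.create_test_server(sched_hints={'group': group_id},
                                               wait_until='ACTIVE')
        self.assertEqual(202, resp.status)

        resp, server_group = self.client.get_server_group(group_id)
        self.assertEqual(200, resp.status)
        self.assertIn(server['id'], server_group['members'])
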
diff --git a/tempest/api/compute/servers/test_server_actions.py b/tempest/api/compute/servers/test_server_actions.py
index d0fd876..71fcbff 100644
--- a/tempest/api/compute/servers/test_server_actions.py
+++ b/tempest/api/compute/servers/test_server_actions.py
@@ -14,6 +14,7 @@
 #    under the License.
 
 import base64
+import logging
 
 import testtools
 import urlparse
@@ -27,6 +28,8 @@
 
 CONF = config.CONF
 
+LOG = logging.getLogger(__name__)
+
 
 class ServerActionsTestJSON(base.BaseV2ComputeTest):
     run_ssh = CONF.compute.run_ssh
@@ -267,7 +270,14 @@
         # the oldest one should be deleted automatically in this test
         def _clean_oldest_backup(oldest_backup):
             if oldest_backup_exist:
-                self.os.image_client.delete_image(oldest_backup)
+                try:
+                    self.os.image_client.delete_image(oldest_backup)
+                except exceptions.NotFound:
+                    pass
+                else:
+                    LOG.warning("Deletion of oldest backup %s should not have "
+                                "been successful as it should have been "
+                                "deleted during rotation." % oldest_backup)
 
         image1_id = data_utils.parse_image_id(resp['location'])
         self.addCleanup(_clean_oldest_backup, image1_id)
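
The cleanup tweak treats a NotFound on delete as the expected outcome: backup rotation should already have removed the oldest image, so a delete that actually succeeds is the surprising case and only earns a warning. The same "delete only if still present" pattern can be factored into a small helper; a minimal sketch (helper name is hypothetical), reusing the module's existing exceptions and LOG objects:

    def _delete_if_still_present(image_client, image_id):
        """Best-effort cleanup for an image that rotation should have removed."""
        try:
            image_client.delete_image(image_id)
        except exceptions.NotFound:
            # Expected: the backup was already rotated away.
            pass
        else:
            LOG.warning("Image %s was still present and had to be deleted "
                        "by the cleanup handler.", image_id)
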
diff --git a/tempest/api/compute/v3/servers/test_server_actions.py b/tempest/api/compute/v3/servers/test_server_actions.py
index e098311..3ee8050 100644
--- a/tempest/api/compute/v3/servers/test_server_actions.py
+++ b/tempest/api/compute/v3/servers/test_server_actions.py
@@ -13,6 +13,8 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import logging
+
 import testtools
 import urlparse
 
@@ -25,6 +27,8 @@
 
 CONF = config.CONF
 
+LOG = logging.getLogger(__name__)
+
 
 class ServerActionsV3Test(base.BaseV3ComputeTest):
     run_ssh = CONF.compute.run_ssh
@@ -260,7 +264,14 @@
         # the oldest one should be deleted automatically in this test
         def _clean_oldest_backup(oldest_backup):
             if oldest_backup_exist:
-                self.images_client.delete_image(oldest_backup)
+                try:
+                    self.images_client.delete_image(oldest_backup)
+                except exceptions.NotFound:
+                    pass
+                else:
+                    LOG.warning("Deletion of oldest backup %s should not have "
+                                "been successful as it should have been "
+                                "deleted during rotation." % oldest_backup)
 
         image1_id = data_utils.parse_image_id(resp['location'])
         self.addCleanup(_clean_oldest_backup, image1_id)
diff --git a/tempest/api/object_storage/test_object_services.py b/tempest/api/object_storage/test_object_services.py
index 06e63a4..1ef9aa1 100644
--- a/tempest/api/object_storage/test_object_services.py
+++ b/tempest/api/object_storage/test_object_services.py
@@ -13,11 +13,13 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import cStringIO as StringIO
 import hashlib
 import random
 import re
 from six import moves
 import time
+import zlib
 
 from tempest.api.object_storage import base
 from tempest.common import custom_matchers
@@ -61,7 +63,7 @@
 
         return object_name, data_segments
 
-    @test.attr(type='smoke')
+    @test.attr(type='gate')
     def test_create_object(self):
         # create object
         object_name = data_utils.rand_name(name='TestObject')
@@ -76,7 +78,242 @@
         self.assertEqual(resp['status'], '201')
         self.assertHeaders(resp, 'Object', 'PUT')
 
-    @test.attr(type='smoke')
+        # check uploaded content
+        _, body = self.object_client.get_object(self.container_name,
+                                                object_name)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_content_disposition(self):
+        # create object with content_disposition
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata = {}
+        metadata['content-disposition'] = 'inline'
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(
+            self.container_name,
+            object_name,
+            metadata=None)
+        self.assertIn('content-disposition', resp)
+        self.assertEqual(resp['content-disposition'], 'inline')
+        self.assertEqual(body, data)
+
+    @test.attr(type='gate')
+    def test_create_object_with_content_encoding(self):
+        # create object with content_encoding
+        object_name = data_utils.rand_name(name='TestObject')
+
+        # put compressed string
+        data_before = 'x' * 2000
+        data = zlib.compress(data_before)
+        metadata = {}
+        metadata['content-encoding'] = 'deflate'
+
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        # download compressed object
+        metadata = {}
+        metadata['accept-encoding'] = 'deflate'
+        resp, body = self.object_client.get_object(
+            self.container_name,
+            object_name,
+            metadata=metadata)
+        self.assertEqual(body, data_before)
+
+    @test.attr(type='gate')
+    def test_create_object_with_etag(self):
+        # create object with etag
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        md5 = hashlib.md5(data).hexdigest()
+        metadata = {'Etag': md5}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        # check uploaded content
+        _, body = self.object_client.get_object(self.container_name,
+                                                object_name)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_expect_continue(self):
+        # create object with expect_continue
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata = {'Expect': '100-continue'}
+        resp = self.custom_object_client.create_object_continue(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+
+        self.assertIn('status', resp)
+        self.assertEqual(resp['status'], '100')
+
+        self.custom_object_client.create_object_continue(
+            self.container_name,
+            object_name,
+            data,
+            metadata=None)
+
+        # check uploaded content
+        _, body = self.object_client.get_object(self.container_name,
+                                                object_name)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_transfer_encoding(self):
+        # create object with transfer_encoding
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string(1024)
+        status, _, resp_headers = self.object_client.put_object_with_chunk(
+            container=self.container_name,
+            name=object_name,
+            contents=StringIO.StringIO(data),
+            chunk_size=512)
+        self.assertEqual(status, 201)
+        self.assertHeaders(resp_headers, 'Object', 'PUT')
+
+        # check uploaded content
+        _, body = self.object_client.get_object(self.container_name,
+                                                object_name)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_x_fresh_metadata(self):
+        # create object with x_fresh_metadata
+        object_name_base = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata_1 = {'X-Object-Meta-test-meta': 'Meta'}
+        self.object_client.create_object(self.container_name,
+                                         object_name_base,
+                                         data,
+                                         metadata=metadata_1)
+        object_name = data_utils.rand_name(name='TestObject')
+        metadata_2 = {'X-Copy-From': '%s/%s' % (self.container_name,
+                                                object_name_base),
+                      'X-Fresh-Metadata': 'true'}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            '',
+            metadata=metadata_2)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(self.container_name,
+                                                   object_name)
+        self.assertNotIn('x-object-meta-test-meta', resp)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_x_object_meta(self):
+        # create object with object_meta
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata = {'X-Object-Meta-test-meta': 'Meta'}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(self.container_name,
+                                                   object_name)
+        self.assertIn('x-object-meta-test-meta', resp)
+        self.assertEqual(resp['x-object-meta-test-meta'], 'Meta')
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_x_object_metakey(self):
+        # create object with a blank metadata value
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata = {'X-Object-Meta-test-meta': ''}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(self.container_name,
+                                                   object_name)
+        self.assertIn('x-object-meta-test-meta', resp)
+        self.assertEqual(resp['x-object-meta-test-meta'], '')
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_x_remove_object_meta(self):
+        # create object with x_remove_object_meta
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata_add = {'X-Object-Meta-test-meta': 'Meta'}
+        self.object_client.create_object(self.container_name,
+                                         object_name,
+                                         data,
+                                         metadata=metadata_add)
+        metadata_remove = {'X-Remove-Object-Meta-test-meta': 'Meta'}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata_remove)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(self.container_name,
+                                                   object_name)
+        self.assertNotIn('x-object-meta-test-meta', resp)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
+    def test_create_object_with_x_remove_object_metakey(self):
+        # create object with a blank value for the remove-metadata header
+        object_name = data_utils.rand_name(name='TestObject')
+        data = data_utils.arbitrary_string()
+        metadata_add = {'X-Object-Meta-test-meta': 'Meta'}
+        self.object_client.create_object(self.container_name,
+                                         object_name,
+                                         data,
+                                         metadata=metadata_add)
+        metadata_remove = {'X-Remove-Object-Meta-test-meta': ''}
+        resp, _ = self.object_client.create_object(
+            self.container_name,
+            object_name,
+            data,
+            metadata=metadata_remove)
+        self.assertEqual(resp['status'], '201')
+        self.assertHeaders(resp, 'Object', 'PUT')
+
+        resp, body = self.object_client.get_object(self.container_name,
+                                                   object_name)
+        self.assertNotIn('x-object-meta-test-meta', resp)
+        self.assertEqual(data, body)
+
+    @test.attr(type='gate')
     def test_delete_object(self):
         # create object
         object_name = data_utils.rand_name(name='TestObject')
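
test_create_object_with_etag leans on Swift's server-side integrity check: when an Etag header accompanies a PUT, Swift compares it against the MD5 of the received body and rejects the request on a mismatch, so the test's 201 assertion already confirms the payload arrived intact. A minimal illustration of the header being sent (values are arbitrary):

    import hashlib

    data = 'example payload'
    metadata = {'Etag': hashlib.md5(data).hexdigest()}
    # If the uploaded body does not hash to this value, the PUT fails
    # instead of silently storing a corrupted object.
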
diff --git a/tempest/auth.py b/tempest/auth.py
index 9c51edb..830dca9 100644
--- a/tempest/auth.py
+++ b/tempest/auth.py
@@ -213,7 +213,7 @@
         # build authenticated request
         # returns new request, it does not touch the original values
         _headers = copy.deepcopy(headers) if headers is not None else {}
-        _headers['X-Auth-Token'] = token
+        _headers['X-Auth-Token'] = str(token)
         if url is None or url == "":
             _url = base_url
         else:
@@ -223,7 +223,7 @@
             parts[2] = re.sub("/{2,}", "/", parts[2])
             _url = urlparse.urlunparse(parts)
         # no change to method or body
-        return _url, _headers, body
+        return str(_url), _headers, body
 
     def _auth_client(self):
         raise NotImplementedError
diff --git a/tempest/cmd/javelin.py b/tempest/cmd/javelin.py
index 20ee63e..1d46028 100755
--- a/tempest/cmd/javelin.py
+++ b/tempest/cmd/javelin.py
@@ -35,6 +35,7 @@
 from tempest.services.image.v2.json import image_client
 from tempest.services.object_storage import container_client
 from tempest.services.object_storage import object_client
+from tempest.services.volume.json import volumes_client
 
 OPTS = {}
 USERS = {}
@@ -60,6 +61,7 @@
         self.containers = container_client.ContainerClient(_auth)
         self.images = image_client.ImageClientV2JSON(_auth)
         self.flavors = flavors_client.FlavorsClientJSON(_auth)
+        self.volumes = volumes_client.VolumesClientJSON(_auth)
 
 
 def load_resources(fname):
@@ -190,6 +192,7 @@
         self.check_users()
         self.check_objects()
         self.check_servers()
+        self.check_volumes()
 
     def check_users(self):
         """Check that the users we expect to exist, do.
@@ -235,6 +238,21 @@
                              "Server %s is not pingable at %s" % (
                                  server['name'], addr))
 
+    def check_volumes(self):
+        """Check that the volumes are still there and attached."""
+        for volume in self.res['volumes']:
+            client = client_for_user(volume['owner'])
+            found = _get_volume_by_name(client, volume['name'])
+            self.assertIsNotNone(
+                found,
+                "Couldn't find expected volume %s" % volume['name'])
+
+            # Verify that the volume's attachment can be retrieved
+            server_id = _get_server_by_name(client, volume['server'])['id']
+            attachment = self.client.get_attachment_from_volume(volume)
+            self.assertEqual(volume['id'], attachment['volume_id'])
+            self.assertEqual(server_id, attachment['server_id'])
+
 
 #######################
 #
@@ -339,6 +357,40 @@
 
 #######################
 #
+# VOLUMES
+#
+#######################
+
+def _get_volume_by_name(client, name):
+    r, body = client.volumes.list_volumes()
+    for volume in body['volumes']:
+        if name == volume['name']:
+            return volume
+    return None
+
+
+def create_volumes(volumes):
+    for volume in volumes:
+        client = client_for_user(volume['owner'])
+
+        # only create a volume if the name isn't here
+        r, body = client.volumes.list_volumes()
+        if any(item['name'] == volume['name'] for item in body):
+            continue
+
+        client.volumes.create_volume(volume['name'], volume['size'])
+
+
+def attach_volumes(volumes):
+    for volume in volumes:
+        client = client_for_user(volume['owner'])
+
+        server_id = _get_server_by_name(client, volume['server'])['id']
+        client.volumes.attach_volume(volume['name'], server_id)
+
+
+#######################
+#
 # MAIN LOGIC
 #
 #######################
@@ -355,6 +407,8 @@
     create_objects(RES['objects'])
     create_images(RES['images'])
     create_servers(RES['servers'])
+    create_volumes(RES['volumes'])
+    attach_volumes(RES['volumes'])
 
 
 def get_options():
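
The volume helpers assume each entry in RES['volumes'] carries the flattened fields introduced in resources.yaml below; a minimal illustration of the expected shape:

    # One RES['volumes'] entry, mirroring the updated resources.yaml format:
    volume = {'name': 'assegai', 'server': 'peltast', 'owner': 'javelin', 'size': 1}
    # create_volumes() reads 'owner', 'name' and 'size';
    # attach_volumes() additionally resolves 'server' to a server id before attaching.
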
diff --git a/tempest/cmd/resources.yaml b/tempest/cmd/resources.yaml
index f7cb8a9..a1f567b 100644
--- a/tempest/cmd/resources.yaml
+++ b/tempest/cmd/resources.yaml
@@ -32,9 +32,14 @@
     aki: cirros-0.3.2-x86_64-vmlinuz
     ari: cirros-0.3.2-x86_64-initrd
 volumes:
-  - assegai:
-    - owner: javelin
-    - gb: 1
+  - name: assegai
+    server: peltast
+    owner: javelin
+    size: 1
+  - name: pifpouf
+    server: hoplite
+    owner: javelin
+    size: 2
 servers:
   - name: peltast
     owner: javelin
diff --git a/tempest/openstack/common/config/generator.py b/tempest/openstack/common/config/generator.py
index 8156cc5..664200e 100644
--- a/tempest/openstack/common/config/generator.py
+++ b/tempest/openstack/common/config/generator.py
@@ -150,7 +150,7 @@
 
 
 def _is_in_group(opt, group):
-    "Check if opt is in group."
+    """Check if opt is in group."""
     for value in group._opts.values():
         # NOTE(llu): Temporary workaround for bug #1262148, wait until
         # newly released oslo.config support '==' operator.
@@ -159,7 +159,7 @@
     return False
 
 
-def _guess_groups(opt, mod_obj):
+def _guess_groups(opt):
     # is it in the DEFAULT group?
     if _is_in_group(opt, cfg.CONF):
         return 'DEFAULT'
@@ -193,7 +193,7 @@
 
     ret = {}
     for opt in opts:
-        ret.setdefault(_guess_groups(opt, obj), []).append(opt)
+        ret.setdefault(_guess_groups(opt), []).append(opt)
     return ret.items()
 
 
@@ -223,6 +223,8 @@
 
 def _sanitize_default(name, value):
     """Set up a reasonably sensible default for pybasedir, my_ip and host."""
+    hostname = socket.gethostname()
+    fqdn = socket.getfqdn()
     if value.startswith(sys.prefix):
         # NOTE(jd) Don't use os.path.join, because it is likely to think the
         # second part is an absolute pathname and therefore drop the first
@@ -234,8 +236,13 @@
         return value.replace(BASEDIR, '')
     elif value == _get_my_ip():
         return '10.0.0.1'
-    elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
-        return 'tempest'
+    elif value in (hostname, fqdn):
+        if 'host' in name:
+            return 'tempest'
+    elif value.endswith(hostname):
+        return value.replace(hostname, 'tempest')
+    elif value.endswith(fqdn):
+        return value.replace(fqdn, 'tempest')
     elif value.strip() != value:
         return '"%s"' % value
     return value
@@ -246,7 +253,6 @@
     if not opt_help:
         sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
         opt_help = ""
-    opt_type = None
     try:
         opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
     except (ValueError, AttributeError) as err:
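
The _sanitize_default change extends hostname scrubbing from exact matches to values that merely end with the hostname or FQDN, while restricting the exact-match case to options whose name contains 'host'. Assuming gethostname() returns 'node1' and getfqdn() returns 'node1.example.com' (illustrative values only), the new behaviour is roughly:

    _sanitize_default('host', 'node1')                       # -> 'tempest'
    _sanitize_default('some_opt', 'node1')                   # -> 'node1' (name lacks 'host')
    _sanitize_default('my_url', 'rabbit@node1')               # -> 'rabbit@tempest'
    _sanitize_default('my_url', 'amqp://node1.example.com')   # -> 'amqp://tempest'
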
diff --git a/tempest/openstack/common/gettextutils.py b/tempest/openstack/common/gettextutils.py
index 17f66f7..6102e67 100644
--- a/tempest/openstack/common/gettextutils.py
+++ b/tempest/openstack/common/gettextutils.py
@@ -32,24 +32,113 @@
 from babel import localedata
 import six
 
-_localedir = os.environ.get('tempest'.upper() + '_LOCALEDIR')
-_t = gettext.translation('tempest', localedir=_localedir, fallback=True)
-
-# We use separate translation catalogs for each log level, so set up a
-# mapping between the log level name and the translator. The domain
-# for the log level is project_name + "-log-" + log_level so messages
-# for each level end up in their own catalog.
-_t_log_levels = dict(
-    (level, gettext.translation('tempest' + '-log-' + level,
-                                localedir=_localedir,
-                                fallback=True))
-    for level in ['info', 'warning', 'error', 'critical']
-)
-
 _AVAILABLE_LANGUAGES = {}
+
+# FIXME(dhellmann): Remove this when moving to oslo.i18n.
 USE_LAZY = False
 
 
+class TranslatorFactory(object):
+    """Create translator functions
+    """
+
+    def __init__(self, domain, lazy=False, localedir=None):
+        """Establish a set of translation functions for the domain.
+
+        :param domain: Name of translation domain,
+                       specifying a message catalog.
+        :type domain: str
+        :param lazy: Delays translation until a message is emitted.
+                     Defaults to False.
+        :type lazy: Boolean
+        :param localedir: Directory with translation catalogs.
+        :type localedir: str
+        """
+        self.domain = domain
+        self.lazy = lazy
+        if localedir is None:
+            localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
+        self.localedir = localedir
+
+    def _make_translation_func(self, domain=None):
+        """Return a new translation function ready for use.
+
+        Takes into account whether or not lazy translation is being
+        done.
+
+        The domain can be specified to override the default from the
+        factory, but the localedir from the factory is always used
+        because we assume the log-level translation catalogs are
+        installed in the same directory as the main application
+        catalog.
+
+        """
+        if domain is None:
+            domain = self.domain
+        if self.lazy:
+            return functools.partial(Message, domain=domain)
+        t = gettext.translation(
+            domain,
+            localedir=self.localedir,
+            fallback=True,
+        )
+        if six.PY3:
+            return t.gettext
+        return t.ugettext
+
+    @property
+    def primary(self):
+        "The default translation function."
+        return self._make_translation_func()
+
+    def _make_log_translation_func(self, level):
+        return self._make_translation_func(self.domain + '-log-' + level)
+
+    @property
+    def log_info(self):
+        "Translate info-level log messages."
+        return self._make_log_translation_func('info')
+
+    @property
+    def log_warning(self):
+        "Translate warning-level log messages."
+        return self._make_log_translation_func('warning')
+
+    @property
+    def log_error(self):
+        "Translate error-level log messages."
+        return self._make_log_translation_func('error')
+
+    @property
+    def log_critical(self):
+        "Translate critical-level log messages."
+        return self._make_log_translation_func('critical')
+
+
+# NOTE(dhellmann): When this module moves out of the incubator into
+# oslo.i18n, these global variables can be moved to an integration
+# module within each application.
+
+# Create the global translation functions.
+_translators = TranslatorFactory('tempest')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
+
+# NOTE(dhellmann): End of globals that will move to the application's
+# integration module.
+
+
 def enable_lazy():
     """Convenience function for configuring _() to use lazy gettext
 
@@ -58,41 +147,18 @@
     your project is importing _ directly instead of using the
     gettextutils.install() way of importing the _ function.
     """
-    global USE_LAZY
+    # FIXME(dhellmann): This function will be removed in oslo.i18n,
+    # because the TranslatorFactory makes it superfluous.
+    global _, _LI, _LW, _LE, _LC, USE_LAZY
+    tf = TranslatorFactory('tempest', lazy=True)
+    _ = tf.primary
+    _LI = tf.log_info
+    _LW = tf.log_warning
+    _LE = tf.log_error
+    _LC = tf.log_critical
     USE_LAZY = True
 
 
-def _(msg):
-    if USE_LAZY:
-        return Message(msg, domain='tempest')
-    else:
-        if six.PY3:
-            return _t.gettext(msg)
-        return _t.ugettext(msg)
-
-
-def _log_translation(msg, level):
-    """Build a single translation of a log message
-    """
-    if USE_LAZY:
-        return Message(msg, domain='tempest' + '-log-' + level)
-    else:
-        translator = _t_log_levels[level]
-        if six.PY3:
-            return translator.gettext(msg)
-        return translator.ugettext(msg)
-
-# Translators for log levels.
-#
-# The abbreviated names are meant to reflect the usual use of a short
-# name like '_'. The "L" is for "log" and the other letter comes from
-# the level.
-_LI = functools.partial(_log_translation, level='info')
-_LW = functools.partial(_log_translation, level='warning')
-_LE = functools.partial(_log_translation, level='error')
-_LC = functools.partial(_log_translation, level='critical')
-
-
 def install(domain, lazy=False):
     """Install a _() function using the given translation domain.
 
@@ -112,26 +178,9 @@
                  any available locale.
     """
     if lazy:
-        # NOTE(mrodden): Lazy gettext functionality.
-        #
-        # The following introduces a deferred way to do translations on
-        # messages in OpenStack. We override the standard _() function
-        # and % (format string) operation to build Message objects that can
-        # later be translated when we have more information.
-        def _lazy_gettext(msg):
-            """Create and return a Message object.
-
-            Lazy gettext function for a given domain, it is a factory method
-            for a project/module to get a lazy gettext function for its own
-            translation domain (i.e. nova, glance, cinder, etc.)
-
-            Message encapsulates a string so that we can translate
-            it later when needed.
-            """
-            return Message(msg, domain=domain)
-
         from six import moves
-        moves.builtins.__dict__['_'] = _lazy_gettext
+        tf = TranslatorFactory(domain, lazy=True)
+        moves.builtins.__dict__['_'] = tf.primary
     else:
         localedir = '%s_LOCALEDIR' % domain.upper()
         if six.PY3:
@@ -274,13 +323,14 @@
     def __radd__(self, other):
         return self.__add__(other)
 
-    def __str__(self):
-        # NOTE(luisg): Logging in python 2.6 tries to str() log records,
-        # and it expects specifically a UnicodeError in order to proceed.
-        msg = _('Message objects do not support str() because they may '
-                'contain non-ascii characters. '
-                'Please use unicode() or translate() instead.')
-        raise UnicodeError(msg)
+    if six.PY2:
+        def __str__(self):
+            # NOTE(luisg): Logging in python 2.6 tries to str() log records,
+            # and it expects specifically a UnicodeError in order to proceed.
+            msg = _('Message objects do not support str() because they may '
+                    'contain non-ascii characters. '
+                    'Please use unicode() or translate() instead.')
+            raise UnicodeError(msg)
 
 
 def get_available_languages(domain):
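
TranslatorFactory replaces the old module-level gettext wiring: a consumer builds one factory for its translation domain and pulls out the callables it needs, exactly as the new module-level globals do for 'tempest'. A sketch for a hypothetical 'myproject' domain:

    import logging

    LOG = logging.getLogger(__name__)

    _translators = TranslatorFactory('myproject')
    _ = _translators.primary            # the usual "_" function
    _LW = _translators.log_warning      # uses the 'myproject-log-warning' catalog

    LOG.warning(_LW('disk is %(pct)d%% full'), {'pct': 93})
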
diff --git a/tempest/openstack/common/importutils.py b/tempest/openstack/common/importutils.py
index 6c0d3b2..d5dd22f 100644
--- a/tempest/openstack/common/importutils.py
+++ b/tempest/openstack/common/importutils.py
@@ -24,10 +24,10 @@
 def import_class(import_str):
     """Returns a class from a string including module and class."""
     mod_str, _sep, class_str = import_str.rpartition('.')
+    __import__(mod_str)
     try:
-        __import__(mod_str)
         return getattr(sys.modules[mod_str], class_str)
-    except (ValueError, AttributeError):
+    except AttributeError:
         raise ImportError('Class %s cannot be found (%s)' %
                           (class_str,
                            traceback.format_exception(*sys.exc_info())))
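
Moving __import__ out of the try block lets a genuinely missing module raise its own ImportError instead of being re-wrapped as "Class ... cannot be found"; only a missing attribute on an importable module takes the wrapped path now. Illustration with a stdlib class:

    decoder_cls = import_class('json.JSONDecoder')    # returns the class object
    decoder_cls().decode('{"ok": true}')               # -> {'ok': True}

    import_class('json.NoSuchThing')      # ImportError: Class NoSuchThing cannot be found (...)
    import_class('no_such_module.Thing')  # plain ImportError from __import__, no wrapping
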
diff --git a/tempest/services/compute/json/servers_client.py b/tempest/services/compute/json/servers_client.py
index 23c1e64..e1661c0 100644
--- a/tempest/services/compute/json/servers_client.py
+++ b/tempest/services/compute/json/servers_client.py
@@ -432,8 +432,9 @@
         return self.action(server_id, 'shelveOffload', None, **kwargs)
 
     def get_console_output(self, server_id, length):
+        kwargs = {'length': length} if length else {}
         return self.action(server_id, 'os-getConsoleOutput', 'output',
-                           common_schema.get_console_output, length=length)
+                           common_schema.get_console_output, **kwargs)
 
     def list_virtual_interfaces(self, server_id):
         """
diff --git a/tempest/services/compute/xml/servers_client.py b/tempest/services/compute/xml/servers_client.py
index 626e655..156d889 100644
--- a/tempest/services/compute/xml/servers_client.py
+++ b/tempest/services/compute/xml/servers_client.py
@@ -594,8 +594,9 @@
         return resp, body
 
     def get_console_output(self, server_id, length):
+        kwargs = {'length': length} if length else {}
         return self.action(server_id, 'os-getConsoleOutput', 'output',
-                           length=length)
+                           **kwargs)
 
     def list_virtual_interfaces(self, server_id):
         """
diff --git a/tempest/services/object_storage/object_client.py b/tempest/services/object_storage/object_client.py
index f3f4eb6..b2f8205 100644
--- a/tempest/services/object_storage/object_client.py
+++ b/tempest/services/object_storage/object_client.py
@@ -13,7 +13,9 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import httplib
 import urllib
+import urlparse
 
 from tempest.common import http
 from tempest.common import rest_client
@@ -143,6 +145,31 @@
         resp, body = self.put(url, data)
         return resp, body
 
+    def put_object_with_chunk(self, container, name, contents, chunk_size):
+        """
+        Put an object with Transfer-Encoding header
+        """
+        if self.base_url is None:
+            self._set_auth()
+
+        headers = {'Transfer-Encoding': 'chunked'}
+        if self.token:
+            headers['X-Auth-Token'] = self.token
+
+        conn = put_object_connection(self.base_url, container, name, contents,
+                                     chunk_size, headers)
+
+        resp = conn.getresponse()
+        body = resp.read()
+
+        resp_headers = {}
+        for header, value in resp.getheaders():
+            resp_headers[header.lower()] = value
+
+        self._error_checker('PUT', None, headers, contents, resp, body)
+
+        return resp.status, resp.reason, resp_headers
+
 
 class ObjectClientCustomizedHeader(rest_client.RestClient):
 
@@ -220,3 +247,89 @@
         url = "%s/%s" % (str(container), str(object_name))
         resp, body = self.delete(url, headers=headers)
         return resp, body
+
+    def create_object_continue(self, container, object_name,
+                               data, metadata=None):
+        """Create storage object."""
+        headers = {}
+        if metadata:
+            for key in metadata:
+                headers[str(key)] = metadata[key]
+
+        if not data:
+            headers['content-length'] = '0'
+
+        if self.base_url is None:
+            self._set_auth()
+        headers['X-Auth-Token'] = self.token
+
+        conn = put_object_connection(self.base_url, str(container),
+                                     str(object_name), data, None, headers)
+
+        response = conn.response_class(conn.sock,
+                                       strict=conn.strict,
+                                       method=conn._method)
+        version, status, reason = response._read_status()
+        resp = {'version': version,
+                'status': str(status),
+                'reason': reason}
+
+        return resp
+
+
+def put_object_connection(base_url, container, name, contents=None,
+                          chunk_size=65536, headers=None, query_string=None):
+    """
+    Helper function to make a connection to put an object with httplib
+    :param base_url: base_url of an object client
+    :param container: container name that the object is in
+    :param name: object name to put
+    :param contents: a string or a file like object to read object data
+                     from; if None, a zero-byte put will be done
+    :param chunk_size: chunk size of data to write; it defaults to 65536;
+                       used only if the contents object has a 'read'
+                       method, e.g. file-like objects; ignored otherwise
+    :param headers: additional headers to include in the request, if any
+    :param query_string: if set will be appended with '?' to generated path
+    """
+    parsed = urlparse.urlparse(base_url)
+    if parsed.scheme == 'https':
+        conn = httplib.HTTPSConnection(parsed.netloc)
+    else:
+        conn = httplib.HTTPConnection(parsed.netloc)
+    path = str(parsed.path) + "/"
+    path += "%s/%s" % (str(container), str(name))
+
+    if query_string:
+        path += '?' + query_string
+    if headers:
+        headers = dict(headers)
+    else:
+        headers = {}
+    if hasattr(contents, 'read'):
+        conn.putrequest('PUT', path)
+        for header, value in headers.iteritems():
+            conn.putheader(header, value)
+        if 'Content-Length' not in headers:
+            if 'Transfer-Encoding' not in headers:
+                conn.putheader('Transfer-Encoding', 'chunked')
+            conn.endheaders()
+            chunk = contents.read(chunk_size)
+            while chunk:
+                conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
+                chunk = contents.read(chunk_size)
+            conn.send('0\r\n\r\n')
+        else:
+            conn.endheaders()
+            left = headers['Content-Length']
+            while left > 0:
+                size = chunk_size
+                if size > left:
+                    size = left
+                chunk = contents.read(size)
+                conn.send(chunk)
+                left -= len(chunk)
+    else:
+        conn.request('PUT', path, contents, headers)
+
+    return conn
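
The two new client helpers are exercised by the object-storage tests earlier in this change; typical usage looks like the following sketch (client instances, container and object names are assumed, as in the tests):

    import cStringIO as StringIO

    data = data_utils.arbitrary_string(1024)
    status, reason, resp_headers = object_client.put_object_with_chunk(
        container='my-container',
        name='my-object',
        contents=StringIO.StringIO(data),
        chunk_size=512)
    # status == 201 on success; resp_headers holds the lower-cased response headers.

    resp = custom_object_client.create_object_continue(
        'my-container', 'my-object', data,
        metadata={'Expect': '100-continue'})
    # resp['status'] == '100' once the server is willing to accept the body.
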
diff --git a/test-requirements.txt b/test-requirements.txt
index 215f28b..13ef291 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -8,3 +8,4 @@
 mock>=1.0
 coverage>=3.6
 oslotest
+stevedore>=0.14
diff --git a/tools/config/check_uptodate.sh b/tools/config/check_uptodate.sh
index 528bd5b..0f0d77e 100755
--- a/tools/config/check_uptodate.sh
+++ b/tools/config/check_uptodate.sh
@@ -16,6 +16,10 @@
 trap "rm -rf $TEMPDIR" EXIT
 
 tools/config/generate_sample.sh -b ./ -p ${PROJECT_NAME} -o ${TEMPDIR}
+if [ $? != 0 ]
+then
+    exit 1
+fi
 
 if ! diff -u ${TEMPDIR}/${CFGFILE_NAME} ${CFGFILE}
 then
diff --git a/tools/config/generate_sample.sh b/tools/config/generate_sample.sh
index 20ddfbb..d22b2f0 100755
--- a/tools/config/generate_sample.sh
+++ b/tools/config/generate_sample.sh
@@ -1,5 +1,15 @@
 #!/usr/bin/env bash
 
+# Generate sample configuration for your project.
+#
+# Aside from the command line flags, it also respects a config file which
+# should be named oslo.config.generator.rc and be placed in the same directory.
+#
+# You can then export the following variables:
+# TEMPEST_CONFIG_GENERATOR_EXTRA_MODULES: list of modules to interrogate for options.
+# TEMPEST_CONFIG_GENERATOR_EXTRA_LIBRARIES: list of libraries to discover.
+# TEMPEST_CONFIG_GENERATOR_EXCLUDED_FILES: list of files to remove from automatic listing.
+
 print_hint() {
     echo "Try \`${0##*/} --help' for more information." >&2
 }
@@ -95,6 +105,10 @@
     source "$RC_FILE"
 fi
 
+for filename in ${TEMPEST_CONFIG_GENERATOR_EXCLUDED_FILES}; do
+    FILES="${FILES[@]/$filename/}"
+done
+
 for mod in ${TEMPEST_CONFIG_GENERATOR_EXTRA_MODULES}; do
     MODULES="$MODULES -m $mod"
 done
@@ -111,6 +125,11 @@
 MODULEPATH=${MODULEPATH:-$DEFAULT_MODULEPATH}
 OUTPUTFILE=$OUTPUTDIR/$PACKAGENAME.conf.sample
 python -m $MODULEPATH $MODULES $LIBRARIES $FILES > $OUTPUTFILE
+if [ $? != 0 ]
+then
+    echo "Can not generate $OUTPUTFILE"
+    exit 1
+fi
 
 # Hook to allow projects to append custom config file snippets
 CONCAT_FILES=$(ls $BASEDIR/tools/config/*.conf.sample 2>/dev/null)
diff --git a/tools/config/oslo.config.generator.rc b/tools/config/oslo.config.generator.rc
new file mode 100644
index 0000000..303e156
--- /dev/null
+++ b/tools/config/oslo.config.generator.rc
@@ -0,0 +1 @@
+MODULEPATH=tempest.common.generate_sample_tempest
diff --git a/tools/generate_sample.sh b/tools/generate_sample.sh
deleted file mode 100755
index 9b312c9..0000000
--- a/tools/generate_sample.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-MODULEPATH=tempest.common.generate_sample_tempest tools/config/generate_sample.sh $@
diff --git a/tox.ini b/tox.ini
index c1acde9..73546ec 100644
--- a/tox.ini
+++ b/tox.ini
@@ -85,7 +85,6 @@
        -r{toxinidir}/test-requirements.txt
 
 [testenv:pep8]
-setenv = MODULEPATH=tempest.common.generate_sample_tempest
 commands =
    flake8 {posargs}
    {toxinidir}/tools/config/check_uptodate.sh