Merge "Full response for VolumeHostsClient methods"
diff --git a/tempest/api/data_processing/base.py b/tempest/api/data_processing/base.py
index 904cbb6..5d78539 100644
--- a/tempest/api/data_processing/base.py
+++ b/tempest/api/data_processing/base.py
@@ -297,6 +297,7 @@
flavor_id,
node_configs,
**kwargs)
+ resp_body = resp_body['node_group_template']
# store id of created node group template
cls._node_group_templates.append(resp_body['id'])
@@ -316,6 +317,7 @@
node_groups,
cluster_configs,
**kwargs)
+ resp_body = resp_body['cluster_template']
# store id of created cluster template
cls._cluster_templates.append(resp_body['id'])
@@ -330,6 +332,7 @@
removed in tearDownClass method.
"""
resp_body = cls.client.create_data_source(name, type, url, **kwargs)
+ resp_body = resp_body['data_source']
# store id of created data source
cls._data_sources.append(resp_body['id'])
@@ -343,6 +346,7 @@
be automatically removed in tearDownClass method.
"""
resp_body = cls.client.create_job_binary_internal(name, data)
+ resp_body = resp_body['job_binary_internal']
# store id of created job binary internal
cls._job_binary_internals.append(resp_body['id'])
@@ -357,6 +361,7 @@
removed in tearDownClass method.
"""
resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
+ resp_body = resp_body['job_binary']
# store id of created job binary
cls._job_binaries.append(resp_body['id'])
@@ -372,6 +377,7 @@
"""
resp_body = cls.client.create_job(name,
job_type, mains, libs, **kwargs)
+ resp_body = resp_body['job']
# store id of created job
cls._jobs.append(resp_body['id'])
@@ -400,7 +406,7 @@
"""
if not cls.default_plugin:
return None
- plugin = cls.client.get_plugin(cls.default_plugin)
+ plugin = cls.client.get_plugin(cls.default_plugin)['plugin']
for version in DEFAULT_TEMPLATES[cls.default_plugin].keys():
if version in plugin['versions']:
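Note: the pattern throughout base.py above is that the data processing client now returns the full parsed response body, so each helper unwraps the resource key itself before reading 'id'. A minimal sketch of the convention, with the payload shape assumed from the Sahara REST API:

    # The client returns the whole JSON document, e.g.
    #   {'data_source': {'id': '...', 'name': '...', 'url': '...'}}
    resp_body = cls.client.create_data_source(name, type, url)
    # Unwrap the resource key, then record the id for teardown.
    resp_body = resp_body['data_source']
    cls._data_sources.append(resp_body['id'])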
diff --git a/tempest/api/data_processing/test_cluster_templates.py b/tempest/api/data_processing/test_cluster_templates.py
index e357a85..42cbd14 100644
--- a/tempest/api/data_processing/test_cluster_templates.py
+++ b/tempest/api/data_processing/test_cluster_templates.py
@@ -98,7 +98,7 @@
template_info = self._create_cluster_template()
# check for cluster template in list
- templates = self.client.list_cluster_templates()
+ templates = self.client.list_cluster_templates()['cluster_templates']
templates_info = [(template['id'], template['name'])
for template in templates]
self.assertIn(template_info, templates_info)
@@ -110,6 +110,7 @@
# check cluster template fetch by id
template = self.client.get_cluster_template(template_id)
+ template = template['cluster_template']
self.assertEqual(template_name, template['name'])
self.assertDictContainsSubset(self.cluster_template, template)
diff --git a/tempest/api/data_processing/test_data_sources.py b/tempest/api/data_processing/test_data_sources.py
index dd16b2f..67d09a0 100644
--- a/tempest/api/data_processing/test_data_sources.py
+++ b/tempest/api/data_processing/test_data_sources.py
@@ -68,13 +68,13 @@
def _list_data_sources(self, source_info):
# check for data source in list
- sources = self.client.list_data_sources()
+ sources = self.client.list_data_sources()['data_sources']
sources_info = [(source['id'], source['name']) for source in sources]
self.assertIn(source_info, sources_info)
def _get_data_source(self, source_id, source_name, source_body):
# check data source fetch by id
- source = self.client.get_data_source(source_id)
+ source = self.client.get_data_source(source_id)['data_source']
self.assertEqual(source_name, source['name'])
self.assertDictContainsSubset(source_body, source)
diff --git a/tempest/api/data_processing/test_job_binaries.py b/tempest/api/data_processing/test_job_binaries.py
index fb21270..98b7e24 100644
--- a/tempest/api/data_processing/test_job_binaries.py
+++ b/tempest/api/data_processing/test_job_binaries.py
@@ -80,7 +80,7 @@
binary_info = self._create_job_binary(self.swift_job_binary_with_extra)
# check for job binary in list
- binaries = self.client.list_job_binaries()
+ binaries = self.client.list_job_binaries()['binaries']
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -91,7 +91,7 @@
self._create_job_binary(self.swift_job_binary_with_extra))
# check job binary fetch by id
- binary = self.client.get_job_binary(binary_id)
+ binary = self.client.get_job_binary(binary_id)['job_binary']
self.assertEqual(binary_name, binary['name'])
self.assertDictContainsSubset(self.swift_job_binary, binary)
@@ -115,7 +115,7 @@
binary_info = self._create_job_binary(self.internal_db_job_binary)
# check for job binary in list
- binaries = self.client.list_job_binaries()
+ binaries = self.client.list_job_binaries()['binaries']
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -126,7 +126,7 @@
self._create_job_binary(self.internal_db_job_binary))
# check job binary fetch by id
- binary = self.client.get_job_binary(binary_id)
+ binary = self.client.get_job_binary(binary_id)['job_binary']
self.assertEqual(binary_name, binary['name'])
self.assertDictContainsSubset(self.internal_db_job_binary, binary)
diff --git a/tempest/api/data_processing/test_job_binary_internals.py b/tempest/api/data_processing/test_job_binary_internals.py
index 3d76ebe..6919fa5 100644
--- a/tempest/api/data_processing/test_job_binary_internals.py
+++ b/tempest/api/data_processing/test_job_binary_internals.py
@@ -57,7 +57,7 @@
binary_info = self._create_job_binary_internal()
# check for job binary internal in list
- binaries = self.client.list_job_binary_internals()
+ binaries = self.client.list_job_binary_internals()['binaries']
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -68,7 +68,7 @@
# check job binary internal fetch by id
binary = self.client.get_job_binary_internal(binary_id)
- self.assertEqual(binary_name, binary['name'])
+ self.assertEqual(binary_name, binary['job_binary_internal']['name'])
@test.attr(type='smoke')
@test.idempotent_id('b3568c33-4eed-40d5-aae4-6ff3b2ac58f5')
diff --git a/tempest/api/data_processing/test_jobs.py b/tempest/api/data_processing/test_jobs.py
index 83eb54d..7798056 100644
--- a/tempest/api/data_processing/test_jobs.py
+++ b/tempest/api/data_processing/test_jobs.py
@@ -71,7 +71,7 @@
job_info = self._create_job()
# check for job in list
- jobs = self.client.list_jobs()
+ jobs = self.client.list_jobs()['jobs']
jobs_info = [(job['id'], job['name']) for job in jobs]
self.assertIn(job_info, jobs_info)
@@ -81,7 +81,7 @@
job_id, job_name = self._create_job()
# check job fetch by id
- job = self.client.get_job(job_id)
+ job = self.client.get_job(job_id)['job']
self.assertEqual(job_name, job['name'])
@test.attr(type='smoke')
diff --git a/tempest/api/data_processing/test_node_group_templates.py b/tempest/api/data_processing/test_node_group_templates.py
index 102799d..388bb58 100644
--- a/tempest/api/data_processing/test_node_group_templates.py
+++ b/tempest/api/data_processing/test_node_group_templates.py
@@ -65,6 +65,7 @@
# check for node group template in list
templates = self.client.list_node_group_templates()
+ templates = templates['node_group_templates']
templates_info = [(template['id'], template['name'])
for template in templates]
self.assertIn(template_info, templates_info)
@@ -76,6 +77,7 @@
# check node group template fetch by id
template = self.client.get_node_group_template(template_id)
+ template = template['node_group_template']
self.assertEqual(template_name, template['name'])
self.assertDictContainsSubset(self.node_group_template, template)
diff --git a/tempest/api/data_processing/test_plugins.py b/tempest/api/data_processing/test_plugins.py
index 92a5bd0..14594e4 100644
--- a/tempest/api/data_processing/test_plugins.py
+++ b/tempest/api/data_processing/test_plugins.py
@@ -25,7 +25,7 @@
It ensures main plugins availability.
"""
- plugins = self.client.list_plugins()
+ plugins = self.client.list_plugins()['plugins']
plugins_names = [plugin['name'] for plugin in plugins]
for enabled_plugin in CONF.data_processing_feature_enabled.plugins:
self.assertIn(enabled_plugin, plugins_names)
@@ -41,12 +41,13 @@
@test.idempotent_id('53cf6487-2cfb-4a6f-8671-97c542c6e901')
def test_plugin_get(self):
for plugin_name in self._list_all_plugin_names():
- plugin = self.client.get_plugin(plugin_name)
+ plugin = self.client.get_plugin(plugin_name)['plugin']
self.assertEqual(plugin_name, plugin['name'])
for plugin_version in plugin['versions']:
detailed_plugin = self.client.get_plugin(plugin_name,
plugin_version)
+ detailed_plugin = detailed_plugin['plugin']
self.assertEqual(plugin_name, detailed_plugin['name'])
# check that required image tags contains name and version
diff --git a/tempest/api/identity/admin/v3/test_endpoints.py b/tempest/api/identity/admin/v3/test_endpoints.py
index 9a8104f..26109cf 100644
--- a/tempest/api/identity/admin/v3/test_endpoints.py
+++ b/tempest/api/identity/admin/v3/test_endpoints.py
@@ -36,6 +36,7 @@
cls.service_data =\
cls.service_client.create_service(s_name, s_type,
description=s_description)
+ cls.service_data = cls.service_data['service']
cls.service_id = cls.service_data['id']
cls.service_ids.append(cls.service_id)
# Create endpoints so as to use for LIST and GET test cases
@@ -110,6 +111,7 @@
service2 =\
self.service_client.create_service(s_name, s_type,
description=s_description)
+ service2 = service2['service']
self.service_ids.append(service2['id'])
# Updating endpoint with new values
region2 = data_utils.rand_name('region')
diff --git a/tempest/api/identity/admin/v3/test_endpoints_negative.py b/tempest/api/identity/admin/v3/test_endpoints_negative.py
index b043415..87c30dc 100644
--- a/tempest/api/identity/admin/v3/test_endpoints_negative.py
+++ b/tempest/api/identity/admin/v3/test_endpoints_negative.py
@@ -38,7 +38,8 @@
s_description = data_utils.rand_name('description')
cls.service_data = (
cls.service_client.create_service(s_name, s_type,
- description=s_description))
+ description=s_description)
+ ['service'])
cls.service_id = cls.service_data['id']
cls.service_ids.append(cls.service_id)
diff --git a/tempest/api/identity/admin/v3/test_services.py b/tempest/api/identity/admin/v3/test_services.py
index 95a7dcc..d920f64 100644
--- a/tempest/api/identity/admin/v3/test_services.py
+++ b/tempest/api/identity/admin/v3/test_services.py
@@ -37,7 +37,7 @@
serv_type = data_utils.rand_name('type')
desc = data_utils.rand_name('description')
create_service = self.service_client.create_service(
- serv_type, name=name, description=desc)
+ serv_type, name=name, description=desc)['service']
self.addCleanup(self._del_service, create_service['id'])
self.assertIsNotNone(create_service['id'])
@@ -50,13 +50,13 @@
resp1_desc = create_service['description']
s_desc2 = data_utils.rand_name('desc2')
update_service = self.service_client.update_service(
- s_id, description=s_desc2)
+ s_id, description=s_desc2)['service']
resp2_desc = update_service['description']
self.assertNotEqual(resp1_desc, resp2_desc)
# Get service
- fetched_service = self.service_client.get_service(s_id)
+ fetched_service = self.service_client.get_service(s_id)['service']
resp3_desc = fetched_service['description']
self.assertEqual(resp2_desc, resp3_desc)
@@ -68,7 +68,7 @@
name = data_utils.rand_name('service')
serv_type = data_utils.rand_name('type')
service = self.service_client.create_service(
- serv_type, name=name)
+ serv_type, name=name)['service']
self.addCleanup(self.service_client.delete_service, service['id'])
self.assertIn('id', service)
expected_data = {'name': name, 'type': serv_type}
@@ -82,13 +82,13 @@
name = data_utils.rand_name('service')
serv_type = data_utils.rand_name('type')
create_service = self.service_client.create_service(
- serv_type, name=name)
+ serv_type, name=name)['service']
self.addCleanup(self.service_client.delete_service,
create_service['id'])
service_ids.append(create_service['id'])
# List and Verify Services
- services = self.service_client.list_services()
+ services = self.service_client.list_services()['services']
fetched_ids = [service['id'] for service in services]
found = [s for s in fetched_ids if s in service_ids]
self.assertEqual(len(found), len(service_ids))
diff --git a/tempest/api/volume/test_availability_zone.py b/tempest/api/volume/test_availability_zone.py
index f188fa9..366b8d2 100644
--- a/tempest/api/volume/test_availability_zone.py
+++ b/tempest/api/volume/test_availability_zone.py
@@ -31,7 +31,8 @@
@test.idempotent_id('01f1ae88-eba9-4c6b-a011-6f7ace06b725')
def test_get_availability_zone_list(self):
# List of availability zone
- availability_zone = self.client.list_availability_zones()
+ availability_zone = (self.client.list_availability_zones()
+ ['availabilityZoneInfo'])
self.assertTrue(len(availability_zone) > 0)
diff --git a/tempest/common/compute.py b/tempest/common/compute.py
index 06e3493..05ea393 100644
--- a/tempest/common/compute.py
+++ b/tempest/common/compute.py
@@ -26,7 +26,7 @@
LOG = logging.getLogger(__name__)
-def create_test_server(clients, validatable, validation_resources=None,
+def create_test_server(clients, validatable=False, validation_resources=None,
tenant_network=None, **kwargs):
"""Common wrapper utility returning a test server.
diff --git a/tempest/scenario/test_dashboard_basic_ops.py b/tempest/scenario/test_dashboard_basic_ops.py
index eb018eb..8e91a6d 100644
--- a/tempest/scenario/test_dashboard_basic_ops.py
+++ b/tempest/scenario/test_dashboard_basic_ops.py
@@ -26,6 +26,7 @@
class HorizonHTMLParser(HTMLParser.HTMLParser):
csrf_token = None
region = None
+ login = None
def _find_name(self, attrs, name):
for attrpair in attrs:
@@ -39,12 +40,20 @@
return attrpair[1]
return None
+ def _find_attr_value(self, attrs, attr_name):
+ for attrpair in attrs:
+ if attrpair[0] == attr_name:
+ return attrpair[1]
+ return None
+
def handle_starttag(self, tag, attrs):
if tag == 'input':
if self._find_name(attrs, 'csrfmiddlewaretoken'):
self.csrf_token = self._find_value(attrs)
if self._find_name(attrs, 'region'):
self.region = self._find_value(attrs)
+ if tag == 'form':
+ self.login = self._find_attr_value(attrs, 'action')
class TestDashboardBasicOps(manager.ScenarioTest):
@@ -79,8 +88,12 @@
parser = HorizonHTMLParser()
parser.feed(response)
+ # Construct the login URL for the dashboard; discovery accommodates
+ # a non-/ web root for the dashboard
+ login_url = CONF.dashboard.dashboard_url + parser.login[1:]
+
# Prepare login form request
- req = request.Request(CONF.dashboard.login_url)
+ req = request.Request(login_url)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
req.add_header('Referer', CONF.dashboard.dashboard_url)
params = {'username': username,
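Note: the parser change above is what makes the login URL discoverable; an illustrative run against minimal markup (the HTML here is assumed for the sketch, not taken from Horizon):

    parser = HorizonHTMLParser()
    parser.feed('<form action="/auth/login/" method="post">'
                '<input name="csrfmiddlewaretoken" value="tok"/></form>')
    assert parser.login == '/auth/login/'
    # parser.login[1:] drops the leading '/', so joining it onto a
    # dashboard_url that ends in '/' yields a correct absolute login URL
    # even when the dashboard lives under a non-root web path.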
diff --git a/tempest/scenario/test_server_basic_ops.py b/tempest/scenario/test_server_basic_ops.py
index f61b151..3019cc4 100644
--- a/tempest/scenario/test_server_basic_ops.py
+++ b/tempest/scenario/test_server_basic_ops.py
@@ -16,6 +16,7 @@
from oslo_log import log as logging
from tempest import config
+from tempest import exceptions
from tempest.scenario import manager
from tempest.scenario import utils as test_utils
from tempest import test
@@ -98,9 +99,24 @@
def verify_metadata(self):
if self.run_ssh and CONF.compute_feature_enabled.metadata_service:
# Verify metadata service
- result = self.ssh_client.exec_command(
- "curl http://169.254.169.254/latest/meta-data/public-ipv4")
- self.assertEqual(self.floating_ip['ip'], result)
+ md_url = 'http://169.254.169.254/latest/meta-data/public-ipv4'
+
+ def exec_cmd_and_verify_output():
+ cmd = 'curl ' + md_url
+ floating_ip = self.floating_ip['ip']
+ result = self.ssh_client.exec_command(cmd)
+ if result:
+ msg = ('Failed while verifying metadata on server. Result '
+ 'of command "%s" is NOT "%s".' % (cmd, floating_ip))
+ self.assertEqual(floating_ip, result, msg)
+ return 'Verification is successful!'
+
+ if not test.call_until_true(exec_cmd_and_verify_output,
+ CONF.compute.build_timeout,
+ CONF.compute.build_interval):
+ raise exceptions.TimeoutException('Timed out while waiting to '
+ 'verify metadata on server. '
+ '%s is empty.' % md_url)
@test.idempotent_id('7fff3fb3-91d8-4fd0-bd7d-0204f1f180ba')
@test.attr(type='smoke')
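Note: the retry above relies on test.call_until_true returning False on timeout; a sketch of that contract as assumed here (not tempest's implementation):

    import time

    def call_until_true(func, duration, sleep_for):
        # Poll func every sleep_for seconds until it returns a truthy
        # value or duration seconds have elapsed; False means timeout.
        deadline = time.time() + duration
        while time.time() < deadline:
            if func():
                return True
            time.sleep(sleep_for)
        return False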
diff --git a/tempest/services/compute/json/servers_client.py b/tempest/services/compute/json/servers_client.py
index a99a1f5..f523f11 100644
--- a/tempest/services/compute/json/servers_client.py
+++ b/tempest/services/compute/json/servers_client.py
@@ -206,15 +206,7 @@
post_body)
if response_key is not None:
body = json.loads(body)
- # Check for Schema as 'None' because if we do not have any server
- # action schema implemented yet then they can pass 'None' to skip
- # the validation.Once all server action has their schema
- # implemented then, this check can be removed if every actions are
- # supposed to validate their response.
- # TODO(GMann): Remove the below 'if' check once all server actions
- # schema are implemented.
- if schema is not None:
- self.validate_response(schema, resp, body)
+ self.validate_response(schema, resp, body)
body = body[response_key]
else:
self.validate_response(schema, resp, body)
diff --git a/tempest/services/data_processing/v1_1/data_processing_client.py b/tempest/services/data_processing/v1_1/data_processing_client.py
index bbc0f2a..cba4c42 100644
--- a/tempest/services/data_processing/v1_1/data_processing_client.py
+++ b/tempest/services/data_processing/v1_1/data_processing_client.py
@@ -39,8 +39,8 @@
self.expected_success(resp_status, resp.status)
return resp, body
- def _request_check_and_parse_resp(self, request_func, uri, resp_status,
- resource_name, *args, **kwargs):
+ def _request_check_and_parse_resp(self, request_func, uri,
+ resp_status, *args, **kwargs):
"""Make a request using specified request_func, check response status
code and parse response body.
@@ -50,36 +50,19 @@
resp, body = request_func(uri, headers=headers, *args, **kwargs)
self.expected_success(resp_status, resp.status)
body = json.loads(body)
- return service_client.ResponseBody(resp, body[resource_name])
-
- def _request_check_and_parse_resp_list(self, request_func, uri,
- resp_status, resource_name,
- *args, **kwargs):
- """Make a request using specified request_func, check response status
- code and parse response body.
-
- It returns a ResponseBodyList.
- """
- headers = {'Content-Type': 'application/json'}
- resp, body = request_func(uri, headers=headers, *args, **kwargs)
- self.expected_success(resp_status, resp.status)
- body = json.loads(body)
- return service_client.ResponseBodyList(resp, body[resource_name])
+ return service_client.ResponseBody(resp, body)
def list_node_group_templates(self):
"""List all node group templates for a user."""
uri = 'node-group-templates'
- return self._request_check_and_parse_resp_list(self.get, uri,
- 200,
- 'node_group_templates')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_node_group_template(self, tmpl_id):
"""Returns the details of a single node group template."""
uri = 'node-group-templates/%s' % tmpl_id
- return self._request_check_and_parse_resp(self.get, uri,
- 200, 'node_group_template')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_node_group_template(self, name, plugin_name, hadoop_version,
node_processes, flavor_id,
@@ -100,7 +83,6 @@
'node_configs': node_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
- 'node_group_template',
body=json.dumps(body))
def delete_node_group_template(self, tmpl_id):
@@ -113,8 +95,7 @@
"""List all enabled plugins."""
uri = 'plugins'
- return self._request_check_and_parse_resp_list(self.get,
- uri, 200, 'plugins')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_plugin(self, plugin_name, plugin_version=None):
"""Returns the details of a single plugin."""
@@ -122,22 +103,19 @@
uri = 'plugins/%s' % plugin_name
if plugin_version:
uri += '/%s' % plugin_version
- return self._request_check_and_parse_resp(self.get, uri, 200, 'plugin')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def list_cluster_templates(self):
"""List all cluster templates for a user."""
uri = 'cluster-templates'
- return self._request_check_and_parse_resp_list(self.get, uri,
- 200,
- 'cluster_templates')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_cluster_template(self, tmpl_id):
"""Returns the details of a single cluster template."""
uri = 'cluster-templates/%s' % tmpl_id
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'cluster_template')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_cluster_template(self, name, plugin_name, hadoop_version,
node_groups, cluster_configs=None,
@@ -157,7 +135,6 @@
'cluster_configs': cluster_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
- 'cluster_template',
body=json.dumps(body))
def delete_cluster_template(self, tmpl_id):
@@ -170,16 +147,13 @@
"""List all data sources for a user."""
uri = 'data-sources'
- return self._request_check_and_parse_resp_list(self.get,
- uri, 200,
- 'data_sources')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_data_source(self, source_id):
"""Returns the details of a single data source."""
uri = 'data-sources/%s' % source_id
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'data_source')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_data_source(self, name, data_source_type, url, **kwargs):
"""Creates data source with specified params.
@@ -195,8 +169,7 @@
'url': url
})
return self._request_check_and_parse_resp(self.post, uri,
- 202, 'data_source',
- body=json.dumps(body))
+ 202, body=json.dumps(body))
def delete_data_source(self, source_id):
"""Deletes the specified data source by id."""
@@ -208,22 +181,19 @@
"""List all job binary internals for a user."""
uri = 'job-binary-internals'
- return self._request_check_and_parse_resp_list(self.get,
- uri, 200, 'binaries')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_binary_internal(self, job_binary_id):
"""Returns the details of a single job binary internal."""
uri = 'job-binary-internals/%s' % job_binary_id
- return self._request_check_and_parse_resp(self.get, uri,
- 200, 'job_binary_internal')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_binary_internal(self, name, data):
"""Creates job binary internal with specified params."""
uri = 'job-binary-internals/%s' % name
- return self._request_check_and_parse_resp(self.put, uri, 202,
- 'job_binary_internal', data)
+ return self._request_check_and_parse_resp(self.put, uri, 202, data)
def delete_job_binary_internal(self, job_binary_id):
"""Deletes the specified job binary internal by id."""
@@ -241,15 +211,13 @@
"""List all job binaries for a user."""
uri = 'job-binaries'
- return self._request_check_and_parse_resp_list(self.get,
- uri, 200, 'binaries')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_binary(self, job_binary_id):
"""Returns the details of a single job binary."""
uri = 'job-binaries/%s' % job_binary_id
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'job_binary')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_binary(self, name, url, extra=None, **kwargs):
"""Creates job binary with specified params.
@@ -265,8 +233,7 @@
'extra': extra or dict(),
})
return self._request_check_and_parse_resp(self.post, uri,
- 202, 'job_binary',
- body=json.dumps(body))
+ 202, body=json.dumps(body))
def delete_job_binary(self, job_binary_id):
"""Deletes the specified job binary by id."""
@@ -284,14 +251,13 @@
"""List all jobs for a user."""
uri = 'jobs'
- return self._request_check_and_parse_resp_list(self.get,
- uri, 200, 'jobs')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job(self, job_id):
"""Returns the details of a single job."""
uri = 'jobs/%s' % job_id
- return self._request_check_and_parse_resp(self.get, uri, 200, 'job')
+ return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job(self, name, job_type, mains, libs=None, **kwargs):
"""Creates job with specified params.
@@ -307,8 +273,8 @@
'mains': mains,
'libs': libs or list(),
})
- return self._request_check_and_parse_resp(self.post, uri, 202,
- 'job', body=json.dumps(body))
+ return self._request_check_and_parse_resp(self.post, uri,
+ 202, body=json.dumps(body))
def delete_job(self, job_id):
"""Deletes the specified job by id."""
diff --git a/tempest/services/identity/v3/json/service_client.py b/tempest/services/identity/v3/json/service_client.py
index 52ff479..2acc3a8 100644
--- a/tempest/services/identity/v3/json/service_client.py
+++ b/tempest/services/identity/v3/json/service_client.py
@@ -23,7 +23,7 @@
def update_service(self, service_id, **kwargs):
"""Updates a service."""
- body = self.get_service(service_id)
+ body = self.get_service(service_id)['service']
name = kwargs.get('name', body['name'])
type = kwargs.get('type', body['type'])
desc = kwargs.get('description', body['description'])
@@ -36,7 +36,7 @@
resp, body = self.patch('services/%s' % service_id, patch_body)
self.expected_success(200, resp.status)
body = json.loads(body)
- return service_client.ResponseBody(resp, body['service'])
+ return service_client.ResponseBody(resp, body)
def get_service(self, service_id):
"""Get Service."""
@@ -44,7 +44,7 @@
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
- return service_client.ResponseBody(resp, body['service'])
+ return service_client.ResponseBody(resp, body)
def create_service(self, serv_type, name=None, description=None,
enabled=True):
@@ -58,7 +58,7 @@
resp, body = self.post("services", body)
self.expected_success(201, resp.status)
body = json.loads(body)
- return service_client.ResponseBody(resp, body["service"])
+ return service_client.ResponseBody(resp, body)
def delete_service(self, serv_id):
url = "services/" + serv_id
@@ -70,4 +70,4 @@
resp, body = self.get('services')
self.expected_success(200, resp.status)
body = json.loads(body)
- return service_client.ResponseBodyList(resp, body['services'])
+ return service_client.ResponseBody(resp, body)
diff --git a/tempest/services/volume/json/availability_zone_client.py b/tempest/services/volume/json/availability_zone_client.py
index 13d5d55..4d24ede 100644
--- a/tempest/services/volume/json/availability_zone_client.py
+++ b/tempest/services/volume/json/availability_zone_client.py
@@ -24,7 +24,7 @@
resp, body = self.get('os-availability-zone')
body = json.loads(body)
self.expected_success(200, resp.status)
- return service_client.ResponseBody(resp, body['availabilityZoneInfo'])
+ return service_client.ResponseBody(resp, body)
class VolumeAvailabilityZoneClient(BaseVolumeAvailabilityZoneClient):
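Note: the volume availability zone client now follows the same full-response convention as the rest of this merge; a usage sketch (the client variable is assumed):

    zones = client.list_availability_zones()['availabilityZoneInfo']
    assert len(zones) > 0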