Change data_processing client to return one value and update tests
Two methods return raw string data (not JSON bodies), so they were left returning the (resp, data) pair
Partially implements: blueprint clients-return-one-value
Change-Id: I08c8e8b3dd61e85afca174a8e4b3148e7ac6452a
diff --git a/tempest/api/data_processing/base.py b/tempest/api/data_processing/base.py
index 2ec1017..45c8488 100644
--- a/tempest/api/data_processing/base.py
+++ b/tempest/api/data_processing/base.py
@@ -77,12 +77,12 @@
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_node_group_template(name, plugin_name,
- hadoop_version,
- node_processes,
- flavor_id,
- node_configs,
- **kwargs)
+ resp_body = cls.client.create_node_group_template(name, plugin_name,
+ hadoop_version,
+ node_processes,
+ flavor_id,
+ node_configs,
+ **kwargs)
# store id of created node group template
cls._node_group_templates.append(resp_body['id'])
@@ -97,11 +97,11 @@
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_cluster_template(name, plugin_name,
- hadoop_version,
- node_groups,
- cluster_configs,
- **kwargs)
+ resp_body = cls.client.create_cluster_template(name, plugin_name,
+ hadoop_version,
+ node_groups,
+ cluster_configs,
+ **kwargs)
# store id of created cluster template
cls._cluster_templates.append(resp_body['id'])
@@ -115,7 +115,7 @@
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_data_source(name, type, url, **kwargs)
+ resp_body = cls.client.create_data_source(name, type, url, **kwargs)
# store id of created data source
cls._data_sources.append(resp_body['id'])
@@ -128,7 +128,7 @@
It returns created object. All resources created in this method will
be automatically removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_job_binary_internal(name, data)
+ resp_body = cls.client.create_job_binary_internal(name, data)
# store id of created job binary internal
cls._job_binary_internals.append(resp_body['id'])
@@ -142,7 +142,7 @@
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
+ resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
# store id of created job binary
cls._job_binaries.append(resp_body['id'])
@@ -156,8 +156,8 @@
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
- _, resp_body = cls.client.create_job(name,
- job_type, mains, libs, **kwargs)
+ resp_body = cls.client.create_job(name,
+ job_type, mains, libs, **kwargs)
# store id of created job
cls._jobs.append(resp_body['id'])
diff --git a/tempest/api/data_processing/test_cluster_templates.py b/tempest/api/data_processing/test_cluster_templates.py
index 537f90c..422ea5b 100644
--- a/tempest/api/data_processing/test_cluster_templates.py
+++ b/tempest/api/data_processing/test_cluster_templates.py
@@ -120,7 +120,7 @@
template_info = self._create_cluster_template()
# check for cluster template in list
- _, templates = self.client.list_cluster_templates()
+ templates = self.client.list_cluster_templates()
templates_info = [(template['id'], template['name'])
for template in templates]
self.assertIn(template_info, templates_info)
@@ -130,7 +130,7 @@
template_id, template_name = self._create_cluster_template()
# check cluster template fetch by id
- _, template = self.client.get_cluster_template(template_id)
+ template = self.client.get_cluster_template(template_id)
self.assertEqual(template_name, template['name'])
self.assertDictContainsSubset(self.cluster_template, template)
diff --git a/tempest/api/data_processing/test_data_sources.py b/tempest/api/data_processing/test_data_sources.py
index 3650751..a50f44b 100644
--- a/tempest/api/data_processing/test_data_sources.py
+++ b/tempest/api/data_processing/test_data_sources.py
@@ -68,13 +68,13 @@
def _list_data_sources(self, source_info):
# check for data source in list
- _, sources = self.client.list_data_sources()
+ sources = self.client.list_data_sources()
sources_info = [(source['id'], source['name']) for source in sources]
self.assertIn(source_info, sources_info)
def _get_data_source(self, source_id, source_name, source_body):
# check data source fetch by id
- _, source = self.client.get_data_source(source_id)
+ source = self.client.get_data_source(source_id)
self.assertEqual(source_name, source['name'])
self.assertDictContainsSubset(source_body, source)
diff --git a/tempest/api/data_processing/test_job_binaries.py b/tempest/api/data_processing/test_job_binaries.py
index d006991..2f6d998 100644
--- a/tempest/api/data_processing/test_job_binaries.py
+++ b/tempest/api/data_processing/test_job_binaries.py
@@ -78,7 +78,7 @@
binary_info = self._create_job_binary(self.swift_job_binary_with_extra)
# check for job binary in list
- _, binaries = self.client.list_job_binaries()
+ binaries = self.client.list_job_binaries()
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -88,7 +88,7 @@
self._create_job_binary(self.swift_job_binary_with_extra))
# check job binary fetch by id
- _, binary = self.client.get_job_binary(binary_id)
+ binary = self.client.get_job_binary(binary_id)
self.assertEqual(binary_name, binary['name'])
self.assertDictContainsSubset(self.swift_job_binary, binary)
@@ -109,7 +109,7 @@
binary_info = self._create_job_binary(self.internal_db_job_binary)
# check for job binary in list
- _, binaries = self.client.list_job_binaries()
+ binaries = self.client.list_job_binaries()
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -119,7 +119,7 @@
self._create_job_binary(self.internal_db_job_binary))
# check job binary fetch by id
- _, binary = self.client.get_job_binary(binary_id)
+ binary = self.client.get_job_binary(binary_id)
self.assertEqual(binary_name, binary['name'])
self.assertDictContainsSubset(self.internal_db_job_binary, binary)
diff --git a/tempest/api/data_processing/test_job_binary_internals.py b/tempest/api/data_processing/test_job_binary_internals.py
index 7e99867..b8121a0 100644
--- a/tempest/api/data_processing/test_job_binary_internals.py
+++ b/tempest/api/data_processing/test_job_binary_internals.py
@@ -55,7 +55,7 @@
binary_info = self._create_job_binary_internal()
# check for job binary internal in list
- _, binaries = self.client.list_job_binary_internals()
+ binaries = self.client.list_job_binary_internals()
binaries_info = [(binary['id'], binary['name']) for binary in binaries]
self.assertIn(binary_info, binaries_info)
@@ -64,7 +64,7 @@
binary_id, binary_name = self._create_job_binary_internal()
# check job binary internal fetch by id
- _, binary = self.client.get_job_binary_internal(binary_id)
+ binary = self.client.get_job_binary_internal(binary_id)
self.assertEqual(binary_name, binary['name'])
@test.attr(type='smoke')
diff --git a/tempest/api/data_processing/test_jobs.py b/tempest/api/data_processing/test_jobs.py
index 5af2eef..a7beb0e 100644
--- a/tempest/api/data_processing/test_jobs.py
+++ b/tempest/api/data_processing/test_jobs.py
@@ -69,7 +69,7 @@
job_info = self._create_job()
# check for job in list
- _, jobs = self.client.list_jobs()
+ jobs = self.client.list_jobs()
jobs_info = [(job['id'], job['name']) for job in jobs]
self.assertIn(job_info, jobs_info)
@@ -78,7 +78,7 @@
job_id, job_name = self._create_job()
# check job fetch by id
- _, job = self.client.get_job(job_id)
+ job = self.client.get_job(job_id)
self.assertEqual(job_name, job['name'])
@test.attr(type='smoke')
diff --git a/tempest/api/data_processing/test_node_group_templates.py b/tempest/api/data_processing/test_node_group_templates.py
index f3f59fc..d37e910 100644
--- a/tempest/api/data_processing/test_node_group_templates.py
+++ b/tempest/api/data_processing/test_node_group_templates.py
@@ -69,7 +69,7 @@
template_info = self._create_node_group_template()
# check for node group template in list
- _, templates = self.client.list_node_group_templates()
+ templates = self.client.list_node_group_templates()
templates_info = [(template['id'], template['name'])
for template in templates]
self.assertIn(template_info, templates_info)
@@ -79,7 +79,7 @@
template_id, template_name = self._create_node_group_template()
# check node group template fetch by id
- _, template = self.client.get_node_group_template(template_id)
+ template = self.client.get_node_group_template(template_id)
self.assertEqual(template_name, template['name'])
self.assertDictContainsSubset(self.node_group_template, template)
diff --git a/tempest/api/data_processing/test_plugins.py b/tempest/api/data_processing/test_plugins.py
index 4b4ec48..8c9b720 100644
--- a/tempest/api/data_processing/test_plugins.py
+++ b/tempest/api/data_processing/test_plugins.py
@@ -25,7 +25,7 @@
It ensures main plugins availability.
"""
- _, plugins = self.client.list_plugins()
+ plugins = self.client.list_plugins()
plugins_names = [plugin['name'] for plugin in plugins]
for enabled_plugin in CONF.data_processing_feature_enabled.plugins:
self.assertIn(enabled_plugin, plugins_names)
@@ -39,12 +39,12 @@
@test.attr(type='smoke')
def test_plugin_get(self):
for plugin_name in self._list_all_plugin_names():
- _, plugin = self.client.get_plugin(plugin_name)
+ plugin = self.client.get_plugin(plugin_name)
self.assertEqual(plugin_name, plugin['name'])
for plugin_version in plugin['versions']:
- _, detailed_plugin = self.client.get_plugin(plugin_name,
- plugin_version)
+ detailed_plugin = self.client.get_plugin(plugin_name,
+ plugin_version)
self.assertEqual(plugin_name, detailed_plugin['name'])
# check that required image tags contains name and version
diff --git a/tempest/services/data_processing/v1_1/client.py b/tempest/services/data_processing/v1_1/client.py
index 55b6be6..8879373 100644
--- a/tempest/services/data_processing/v1_1/client.py
+++ b/tempest/services/data_processing/v1_1/client.py
@@ -33,7 +33,17 @@
"""Make a request using specified request_func and check response
status code.
- It returns pair: resp and response body.
+ It returns a ResponseBody.
+ """
+ resp, body = request_func(uri)
+ self.expected_success(resp_status, resp.status)
+ return service_client.ResponseBody(resp, body)
+
+ def _request_and_check_resp_data(self, request_func, uri, resp_status):
+ """Make a request using specified request_func and check response
+ status code.
+
+ It returns pair: resp and response data.
"""
resp, body = request_func(uri)
self.expected_success(resp_status, resp.status)
@@ -44,20 +54,35 @@
"""Make a request using specified request_func, check response status
code and parse response body.
- It returns pair: resp and parsed resource(s) body.
+ It returns a ResponseBody.
"""
headers = {'Content-Type': 'application/json'}
resp, body = request_func(uri, headers=headers, *args, **kwargs)
self.expected_success(resp_status, resp.status)
body = json.loads(body)
- return resp, body[resource_name]
+ return service_client.ResponseBody(resp, body[resource_name])
+
+ def _request_check_and_parse_resp_list(self, request_func, uri,
+ resp_status, resource_name,
+ *args, **kwargs):
+ """Make a request using specified request_func, check response status
+ code and parse response body.
+
+ It returns a ResponseBodyList.
+ """
+ headers = {'Content-Type': 'application/json'}
+ resp, body = request_func(uri, headers=headers, *args, **kwargs)
+ self.expected_success(resp_status, resp.status)
+ body = json.loads(body)
+ return service_client.ResponseBodyList(resp, body[resource_name])
def list_node_group_templates(self):
"""List all node group templates for a user."""
uri = 'node-group-templates'
- return self._request_check_and_parse_resp(self.get, uri,
- 200, 'node_group_templates')
+ return self._request_check_and_parse_resp_list(self.get, uri,
+ 200,
+ 'node_group_templates')
def get_node_group_template(self, tmpl_id):
"""Returns the details of a single node group template."""
@@ -98,8 +123,8 @@
"""List all enabled plugins."""
uri = 'plugins'
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'plugins')
+ return self._request_check_and_parse_resp_list(self.get,
+ uri, 200, 'plugins')
def get_plugin(self, plugin_name, plugin_version=None):
"""Returns the details of a single plugin."""
@@ -113,8 +138,9 @@
"""List all cluster templates for a user."""
uri = 'cluster-templates'
- return self._request_check_and_parse_resp(self.get, uri,
- 200, 'cluster_templates')
+ return self._request_check_and_parse_resp_list(self.get, uri,
+ 200,
+ 'cluster_templates')
def get_cluster_template(self, tmpl_id):
"""Returns the details of a single cluster template."""
@@ -154,8 +180,9 @@
"""List all data sources for a user."""
uri = 'data-sources'
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'data_sources')
+ return self._request_check_and_parse_resp_list(self.get,
+ uri, 200,
+ 'data_sources')
def get_data_source(self, source_id):
"""Returns the details of a single data source."""
@@ -191,8 +218,8 @@
"""List all job binary internals for a user."""
uri = 'job-binary-internals'
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'binaries')
+ return self._request_check_and_parse_resp_list(self.get,
+ uri, 200, 'binaries')
def get_job_binary_internal(self, job_binary_id):
"""Returns the details of a single job binary internal."""
@@ -218,14 +245,14 @@
"""Returns data of a single job binary internal."""
uri = 'job-binary-internals/%s/data' % job_binary_id
- return self._request_and_check_resp(self.get, uri, 200)
+ return self._request_and_check_resp_data(self.get, uri, 200)
def list_job_binaries(self):
"""List all job binaries for a user."""
uri = 'job-binaries'
- return self._request_check_and_parse_resp(self.get,
- uri, 200, 'binaries')
+ return self._request_check_and_parse_resp_list(self.get,
+ uri, 200, 'binaries')
def get_job_binary(self, job_binary_id):
"""Returns the details of a single job binary."""
@@ -261,13 +288,14 @@
"""Returns data of a single job binary."""
uri = 'job-binaries/%s/data' % job_binary_id
- return self._request_and_check_resp(self.get, uri, 200)
+ return self._request_and_check_resp_data(self.get, uri, 200)
def list_jobs(self):
"""List all jobs for a user."""
uri = 'jobs'
- return self._request_check_and_parse_resp(self.get, uri, 200, 'jobs')
+ return self._request_check_and_parse_resp_list(self.get,
+ uri, 200, 'jobs')
def get_job(self, job_id):
"""Returns the details of a single job."""