Return full response from DataProcessingClient methods

Since service clients are being migrated to tempest-lib, they should
not truncate anything from the API response. The current DataProcessing
client strips the top-level key from each response and returns only its
value to the caller. All service clients should return the response
exactly as they receive it from the API.

Change-Id: I0d2d47db0ccfb306269a60557c4e510082ad0d3e
Partially-implements: blueprint method-return-value-and-move-service-clients-to-lib
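
For illustration, a minimal sketch of the calling convention this
change establishes ('client' and 'tmpl_id' are placeholders for the
configured DataProcessingClient and a template id):

    # before: the client stripped the top-level key internally
    #   template = client.get_node_group_template(tmpl_id)
    #   template['name']

    # after: the full body comes back; the caller unwraps the key
    template = client.get_node_group_template(tmpl_id)
    template = template['node_group_template']
    template['name']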
diff --git a/tempest/api/data_processing/base.py b/tempest/api/data_processing/base.py
index 904cbb6..5d78539 100644
--- a/tempest/api/data_processing/base.py
+++ b/tempest/api/data_processing/base.py
@@ -297,6 +297,7 @@
                                                           flavor_id,
                                                           node_configs,
                                                           **kwargs)
+        resp_body = resp_body['node_group_template']
         # store id of created node group template
         cls._node_group_templates.append(resp_body['id'])
 
@@ -316,6 +317,7 @@
                                                        node_groups,
                                                        cluster_configs,
                                                        **kwargs)
+        resp_body = resp_body['cluster_template']
         # store id of created cluster template
         cls._cluster_templates.append(resp_body['id'])
 
@@ -330,6 +332,7 @@
         removed in tearDownClass method.
         """
         resp_body = cls.client.create_data_source(name, type, url, **kwargs)
+        resp_body = resp_body['data_source']
         # store id of created data source
         cls._data_sources.append(resp_body['id'])
 
@@ -343,6 +346,7 @@
         be automatically removed in tearDownClass method.
         """
         resp_body = cls.client.create_job_binary_internal(name, data)
+        resp_body = resp_body['job_binary_internal']
         # store id of created job binary internal
         cls._job_binary_internals.append(resp_body['id'])
 
@@ -357,6 +361,7 @@
         removed in tearDownClass method.
         """
         resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
+        resp_body = resp_body['job_binary']
         # store id of created job binary
         cls._job_binaries.append(resp_body['id'])
 
@@ -372,6 +377,7 @@
         """
         resp_body = cls.client.create_job(name,
                                           job_type, mains, libs, **kwargs)
+        resp_body = resp_body['job']
         # store id of created job
         cls._jobs.append(resp_body['id'])
 
@@ -400,7 +406,7 @@
         """
         if not cls.default_plugin:
             return None
-        plugin = cls.client.get_plugin(cls.default_plugin)
+        plugin = cls.client.get_plugin(cls.default_plugin)['plugin']
 
         for version in DEFAULT_TEMPLATES[cls.default_plugin].keys():
             if version in plugin['versions']:
diff --git a/tempest/api/data_processing/test_cluster_templates.py b/tempest/api/data_processing/test_cluster_templates.py
index e357a85..42cbd14 100644
--- a/tempest/api/data_processing/test_cluster_templates.py
+++ b/tempest/api/data_processing/test_cluster_templates.py
@@ -98,7 +98,7 @@
         template_info = self._create_cluster_template()
 
         # check for cluster template in list
-        templates = self.client.list_cluster_templates()
+        templates = self.client.list_cluster_templates()['cluster_templates']
         templates_info = [(template['id'], template['name'])
                           for template in templates]
         self.assertIn(template_info, templates_info)
@@ -110,6 +110,7 @@
 
         # check cluster template fetch by id
         template = self.client.get_cluster_template(template_id)
+        template = template['cluster_template']
         self.assertEqual(template_name, template['name'])
         self.assertDictContainsSubset(self.cluster_template, template)
 
diff --git a/tempest/api/data_processing/test_data_sources.py b/tempest/api/data_processing/test_data_sources.py
index dd16b2f..67d09a0 100644
--- a/tempest/api/data_processing/test_data_sources.py
+++ b/tempest/api/data_processing/test_data_sources.py
@@ -68,13 +68,13 @@
 
     def _list_data_sources(self, source_info):
         # check for data source in list
-        sources = self.client.list_data_sources()
+        sources = self.client.list_data_sources()['data_sources']
         sources_info = [(source['id'], source['name']) for source in sources]
         self.assertIn(source_info, sources_info)
 
     def _get_data_source(self, source_id, source_name, source_body):
         # check data source fetch by id
-        source = self.client.get_data_source(source_id)
+        source = self.client.get_data_source(source_id)['data_source']
         self.assertEqual(source_name, source['name'])
         self.assertDictContainsSubset(source_body, source)
 
diff --git a/tempest/api/data_processing/test_job_binaries.py b/tempest/api/data_processing/test_job_binaries.py
index fb21270..98b7e24 100644
--- a/tempest/api/data_processing/test_job_binaries.py
+++ b/tempest/api/data_processing/test_job_binaries.py
@@ -80,7 +80,7 @@
         binary_info = self._create_job_binary(self.swift_job_binary_with_extra)
 
         # check for job binary in list
-        binaries = self.client.list_job_binaries()
+        binaries = self.client.list_job_binaries()['binaries']
         binaries_info = [(binary['id'], binary['name']) for binary in binaries]
         self.assertIn(binary_info, binaries_info)
 
@@ -91,7 +91,7 @@
             self._create_job_binary(self.swift_job_binary_with_extra))
 
         # check job binary fetch by id
-        binary = self.client.get_job_binary(binary_id)
+        binary = self.client.get_job_binary(binary_id)['job_binary']
         self.assertEqual(binary_name, binary['name'])
         self.assertDictContainsSubset(self.swift_job_binary, binary)
 
@@ -115,7 +115,7 @@
         binary_info = self._create_job_binary(self.internal_db_job_binary)
 
         # check for job binary in list
-        binaries = self.client.list_job_binaries()
+        binaries = self.client.list_job_binaries()['binaries']
         binaries_info = [(binary['id'], binary['name']) for binary in binaries]
         self.assertIn(binary_info, binaries_info)
 
@@ -126,7 +126,7 @@
             self._create_job_binary(self.internal_db_job_binary))
 
         # check job binary fetch by id
-        binary = self.client.get_job_binary(binary_id)
+        binary = self.client.get_job_binary(binary_id)['job_binary']
         self.assertEqual(binary_name, binary['name'])
         self.assertDictContainsSubset(self.internal_db_job_binary, binary)
 
diff --git a/tempest/api/data_processing/test_job_binary_internals.py b/tempest/api/data_processing/test_job_binary_internals.py
index 3d76ebe..6919fa5 100644
--- a/tempest/api/data_processing/test_job_binary_internals.py
+++ b/tempest/api/data_processing/test_job_binary_internals.py
@@ -57,7 +57,7 @@
         binary_info = self._create_job_binary_internal()
 
         # check for job binary internal in list
-        binaries = self.client.list_job_binary_internals()
+        binaries = self.client.list_job_binary_internals()['binaries']
         binaries_info = [(binary['id'], binary['name']) for binary in binaries]
         self.assertIn(binary_info, binaries_info)
 
@@ -68,7 +68,7 @@
 
         # check job binary internal fetch by id
         binary = self.client.get_job_binary_internal(binary_id)
-        self.assertEqual(binary_name, binary['name'])
+        self.assertEqual(binary_name, binary['job_binary_internal']['name'])
 
     @test.attr(type='smoke')
     @test.idempotent_id('b3568c33-4eed-40d5-aae4-6ff3b2ac58f5')
diff --git a/tempest/api/data_processing/test_jobs.py b/tempest/api/data_processing/test_jobs.py
index 83eb54d..7798056 100644
--- a/tempest/api/data_processing/test_jobs.py
+++ b/tempest/api/data_processing/test_jobs.py
@@ -71,7 +71,7 @@
         job_info = self._create_job()
 
         # check for job in list
-        jobs = self.client.list_jobs()
+        jobs = self.client.list_jobs()['jobs']
         jobs_info = [(job['id'], job['name']) for job in jobs]
         self.assertIn(job_info, jobs_info)
 
@@ -81,7 +81,7 @@
         job_id, job_name = self._create_job()
 
         # check job fetch by id
-        job = self.client.get_job(job_id)
+        job = self.client.get_job(job_id)['job']
         self.assertEqual(job_name, job['name'])
 
     @test.attr(type='smoke')
diff --git a/tempest/api/data_processing/test_node_group_templates.py b/tempest/api/data_processing/test_node_group_templates.py
index 102799d..388bb58 100644
--- a/tempest/api/data_processing/test_node_group_templates.py
+++ b/tempest/api/data_processing/test_node_group_templates.py
@@ -65,6 +65,7 @@
 
         # check for node group template in list
         templates = self.client.list_node_group_templates()
+        templates = templates['node_group_templates']
         templates_info = [(template['id'], template['name'])
                           for template in templates]
         self.assertIn(template_info, templates_info)
@@ -76,6 +77,7 @@
 
         # check node group template fetch by id
         template = self.client.get_node_group_template(template_id)
+        template = template['node_group_template']
         self.assertEqual(template_name, template['name'])
         self.assertDictContainsSubset(self.node_group_template, template)
 
diff --git a/tempest/api/data_processing/test_plugins.py b/tempest/api/data_processing/test_plugins.py
index 92a5bd0..14594e4 100644
--- a/tempest/api/data_processing/test_plugins.py
+++ b/tempest/api/data_processing/test_plugins.py
@@ -25,7 +25,7 @@
 
         It ensures main plugins availability.
         """
-        plugins = self.client.list_plugins()
+        plugins = self.client.list_plugins()['plugins']
         plugins_names = [plugin['name'] for plugin in plugins]
         for enabled_plugin in CONF.data_processing_feature_enabled.plugins:
             self.assertIn(enabled_plugin, plugins_names)
@@ -41,12 +41,13 @@
     @test.idempotent_id('53cf6487-2cfb-4a6f-8671-97c542c6e901')
     def test_plugin_get(self):
         for plugin_name in self._list_all_plugin_names():
-            plugin = self.client.get_plugin(plugin_name)
+            plugin = self.client.get_plugin(plugin_name)['plugin']
             self.assertEqual(plugin_name, plugin['name'])
 
             for plugin_version in plugin['versions']:
                 detailed_plugin = self.client.get_plugin(plugin_name,
                                                          plugin_version)
+                detailed_plugin = detailed_plugin['plugin']
                 self.assertEqual(plugin_name, detailed_plugin['name'])
 
                 # check that required image tags contains name and version
diff --git a/tempest/services/data_processing/v1_1/data_processing_client.py b/tempest/services/data_processing/v1_1/data_processing_client.py
index bbc0f2a..cba4c42 100644
--- a/tempest/services/data_processing/v1_1/data_processing_client.py
+++ b/tempest/services/data_processing/v1_1/data_processing_client.py
@@ -39,8 +39,8 @@
         self.expected_success(resp_status, resp.status)
         return resp, body
 
-    def _request_check_and_parse_resp(self, request_func, uri, resp_status,
-                                      resource_name, *args, **kwargs):
+    def _request_check_and_parse_resp(self, request_func, uri,
+                                      resp_status, *args, **kwargs):
         """Make a request using specified request_func, check response status
         code and parse response body.
 
@@ -50,36 +50,19 @@
         resp, body = request_func(uri, headers=headers, *args, **kwargs)
         self.expected_success(resp_status, resp.status)
         body = json.loads(body)
-        return service_client.ResponseBody(resp, body[resource_name])
-
-    def _request_check_and_parse_resp_list(self, request_func, uri,
-                                           resp_status, resource_name,
-                                           *args, **kwargs):
-        """Make a request using specified request_func, check response status
-        code and parse response body.
-
-        It returns a ResponseBodyList.
-        """
-        headers = {'Content-Type': 'application/json'}
-        resp, body = request_func(uri, headers=headers, *args, **kwargs)
-        self.expected_success(resp_status, resp.status)
-        body = json.loads(body)
-        return service_client.ResponseBodyList(resp, body[resource_name])
+        return service_client.ResponseBody(resp, body)
 
     def list_node_group_templates(self):
         """List all node group templates for a user."""
 
         uri = 'node-group-templates'
-        return self._request_check_and_parse_resp_list(self.get, uri,
-                                                       200,
-                                                       'node_group_templates')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_node_group_template(self, tmpl_id):
         """Returns the details of a single node group template."""
 
         uri = 'node-group-templates/%s' % tmpl_id
-        return self._request_check_and_parse_resp(self.get, uri,
-                                                  200, 'node_group_template')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_node_group_template(self, name, plugin_name, hadoop_version,
                                    node_processes, flavor_id,
@@ -100,7 +83,6 @@
             'node_configs': node_configs or dict(),
         })
         return self._request_check_and_parse_resp(self.post, uri, 202,
-                                                  'node_group_template',
                                                   body=json.dumps(body))
 
     def delete_node_group_template(self, tmpl_id):
@@ -113,8 +95,7 @@
         """List all enabled plugins."""
 
         uri = 'plugins'
-        return self._request_check_and_parse_resp_list(self.get,
-                                                       uri, 200, 'plugins')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_plugin(self, plugin_name, plugin_version=None):
         """Returns the details of a single plugin."""
@@ -122,22 +103,19 @@
         uri = 'plugins/%s' % plugin_name
         if plugin_version:
             uri += '/%s' % plugin_version
-        return self._request_check_and_parse_resp(self.get, uri, 200, 'plugin')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def list_cluster_templates(self):
         """List all cluster templates for a user."""
 
         uri = 'cluster-templates'
-        return self._request_check_and_parse_resp_list(self.get, uri,
-                                                       200,
-                                                       'cluster_templates')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_cluster_template(self, tmpl_id):
         """Returns the details of a single cluster template."""
 
         uri = 'cluster-templates/%s' % tmpl_id
-        return self._request_check_and_parse_resp(self.get,
-                                                  uri, 200, 'cluster_template')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_cluster_template(self, name, plugin_name, hadoop_version,
                                 node_groups, cluster_configs=None,
@@ -157,7 +135,6 @@
             'cluster_configs': cluster_configs or dict(),
         })
         return self._request_check_and_parse_resp(self.post, uri, 202,
-                                                  'cluster_template',
                                                   body=json.dumps(body))
 
     def delete_cluster_template(self, tmpl_id):
@@ -170,16 +147,13 @@
         """List all data sources for a user."""
 
         uri = 'data-sources'
-        return self._request_check_and_parse_resp_list(self.get,
-                                                       uri, 200,
-                                                       'data_sources')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_data_source(self, source_id):
         """Returns the details of a single data source."""
 
         uri = 'data-sources/%s' % source_id
-        return self._request_check_and_parse_resp(self.get,
-                                                  uri, 200, 'data_source')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_data_source(self, name, data_source_type, url, **kwargs):
         """Creates data source with specified params.
@@ -195,8 +169,7 @@
             'url': url
         })
         return self._request_check_and_parse_resp(self.post, uri,
-                                                  202, 'data_source',
-                                                  body=json.dumps(body))
+                                                  202, body=json.dumps(body))
 
     def delete_data_source(self, source_id):
         """Deletes the specified data source by id."""
@@ -208,22 +181,19 @@
         """List all job binary internals for a user."""
 
         uri = 'job-binary-internals'
-        return self._request_check_and_parse_resp_list(self.get,
-                                                       uri, 200, 'binaries')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_job_binary_internal(self, job_binary_id):
         """Returns the details of a single job binary internal."""
 
         uri = 'job-binary-internals/%s' % job_binary_id
-        return self._request_check_and_parse_resp(self.get, uri,
-                                                  200, 'job_binary_internal')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_job_binary_internal(self, name, data):
         """Creates job binary internal with specified params."""
 
         uri = 'job-binary-internals/%s' % name
-        return self._request_check_and_parse_resp(self.put, uri, 202,
-                                                  'job_binary_internal', data)
+        return self._request_check_and_parse_resp(self.put, uri, 202, data)
 
     def delete_job_binary_internal(self, job_binary_id):
         """Deletes the specified job binary internal by id."""
@@ -241,15 +211,13 @@
         """List all job binaries for a user."""
 
         uri = 'job-binaries'
-        return self._request_check_and_parse_resp_list(self.get,
-                                                       uri, 200, 'binaries')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_job_binary(self, job_binary_id):
         """Returns the details of a single job binary."""
 
         uri = 'job-binaries/%s' % job_binary_id
-        return self._request_check_and_parse_resp(self.get,
-                                                  uri, 200, 'job_binary')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_job_binary(self, name, url, extra=None, **kwargs):
         """Creates job binary with specified params.
@@ -265,8 +233,7 @@
             'extra': extra or dict(),
         })
         return self._request_check_and_parse_resp(self.post, uri,
-                                                  202, 'job_binary',
-                                                  body=json.dumps(body))
+                                                  202, body=json.dumps(body))
 
     def delete_job_binary(self, job_binary_id):
         """Deletes the specified job binary by id."""
@@ -284,14 +251,13 @@
         """List all jobs for a user."""
 
         uri = 'jobs'
-        return self._request_check_and_parse_resp_list(self.get,
-                                                       uri, 200, 'jobs')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def get_job(self, job_id):
         """Returns the details of a single job."""
 
         uri = 'jobs/%s' % job_id
-        return self._request_check_and_parse_resp(self.get, uri, 200, 'job')
+        return self._request_check_and_parse_resp(self.get, uri, 200)
 
     def create_job(self, name, job_type, mains, libs=None, **kwargs):
         """Creates job with specified params.
@@ -307,8 +273,8 @@
             'mains': mains,
             'libs': libs or list(),
         })
-        return self._request_check_and_parse_resp(self.post, uri, 202,
-                                                  'job', body=json.dumps(body))
+        return self._request_check_and_parse_resp(self.post, uri,
+                                                  202, body=json.dumps(body))
 
     def delete_job(self, job_id):
         """Deletes the specified job by id."""