Remove Sahara tests from Tempest
These tests have been slated for removal for ages: they now live as a
Tempest plugin in the openstack/sahara-tests repository, so keeping an
in-tree copy only makes Tempest bigger. Remove them, along with the
data_processing service client and the data-processing config groups.
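
For anyone still consuming these tests, Tempest picks them up through
its plugin interface once sahara-tests is installed. A minimal sketch
of what such a plugin hooks into is below; the class name and test
directory are illustrative, not the actual sahara-tests code:

    import os

    from tempest.test_discover import plugins


    class SaharaTempestPlugin(plugins.TempestPlugin):
        """Skeleton of a Tempest plugin (illustrative only)."""

        def load_tests(self):
            # Tell Tempest's test discovery where the plugin's tests
            # live: (full path to the tests dir, package base path).
            base_path = os.path.split(os.path.dirname(
                os.path.abspath(__file__)))[0]
            test_dir = 'sahara_tempest_plugin/tests'  # hypothetical path
            return os.path.join(base_path, test_dir), base_path

        def register_opts(self, conf):
            # Register the plugin's config options (for example, the
            # data-processing groups removed below) on the passed conf.
            pass

        def get_opt_lists(self):
            # Expose (group name, option list) pairs so the options
            # show up in generated sample configs.
            return []

The plugin is advertised through the tempest.test_plugins entry point,
so installing the sahara-tests package should be enough for the tests
to show up again in test discovery.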
Change-Id: I3370a146042f23a5ba2ca40d2ddf76e7c9e9cf0f
diff --git a/releasenotes/notes/remove-sahara-tests-1532c47c7df80e3a.yaml b/releasenotes/notes/remove-sahara-tests-1532c47c7df80e3a.yaml
new file mode 100644
index 0000000..b541cf9
--- /dev/null
+++ b/releasenotes/notes/remove-sahara-tests-1532c47c7df80e3a.yaml
@@ -0,0 +1,4 @@
+---
+upgrade:
+ - All tests for the Sahara project have been removed from Tempest. They now
+ live as a Tempest plugin in the ``openstack/sahara-tests`` repository.
diff --git a/tempest/api/data_processing/__init__.py b/tempest/api/data_processing/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tempest/api/data_processing/__init__.py
+++ /dev/null
diff --git a/tempest/api/data_processing/base.py b/tempest/api/data_processing/base.py
deleted file mode 100644
index c8506ae..0000000
--- a/tempest/api/data_processing/base.py
+++ /dev/null
@@ -1,442 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import collections
-import copy
-
-import six
-
-from tempest import config
-from tempest import exceptions
-from tempest.lib.common.utils import test_utils
-import tempest.test
-
-
-CONF = config.CONF
-
-"""Default templates.
-There should always be at least a master1 and a worker1 node
-group template."""
-BASE_VANILLA_DESC = {
- 'NODES': {
- 'master1': {
- 'count': 1,
- 'node_processes': ['namenode', 'resourcemanager',
- 'hiveserver']
- },
- 'master2': {
- 'count': 1,
- 'node_processes': ['oozie', 'historyserver',
- 'secondarynamenode']
- },
- 'worker1': {
- 'count': 1,
- 'node_processes': ['datanode', 'nodemanager'],
- 'node_configs': {
- 'MapReduce': {
- 'yarn.app.mapreduce.am.resource.mb': 256,
- 'yarn.app.mapreduce.am.command-opts': '-Xmx256m'
- },
- 'YARN': {
- 'yarn.scheduler.minimum-allocation-mb': 256,
- 'yarn.scheduler.maximum-allocation-mb': 1024,
- 'yarn.nodemanager.vmem-check-enabled': False
- }
- }
- }
- },
- 'cluster_configs': {
- 'HDFS': {
- 'dfs.replication': 1
- }
- }
-}
-
-BASE_SPARK_DESC = {
- 'NODES': {
- 'master1': {
- 'count': 1,
- 'node_processes': ['namenode', 'master']
- },
- 'worker1': {
- 'count': 1,
- 'node_processes': ['datanode', 'slave']
- }
- },
- 'cluster_configs': {
- 'HDFS': {
- 'dfs.replication': 1
- }
- }
-}
-
-BASE_CDH_DESC = {
- 'NODES': {
- 'master1': {
- 'count': 1,
- 'node_processes': ['CLOUDERA_MANAGER']
- },
- 'master2': {
- 'count': 1,
- 'node_processes': ['HDFS_NAMENODE',
- 'YARN_RESOURCEMANAGER']
- },
- 'master3': {
- 'count': 1,
- 'node_processes': ['OOZIE_SERVER', 'YARN_JOBHISTORY',
- 'HDFS_SECONDARYNAMENODE',
- 'HIVE_METASTORE', 'HIVE_SERVER2']
- },
- 'worker1': {
- 'count': 1,
- 'node_processes': ['YARN_NODEMANAGER', 'HDFS_DATANODE']
- }
- },
- 'cluster_configs': {
- 'HDFS': {
- 'dfs_replication': 1
- }
- }
-}
-
-
-DEFAULT_TEMPLATES = {
- 'vanilla': collections.OrderedDict([
- ('2.6.0', copy.deepcopy(BASE_VANILLA_DESC)),
- ('2.7.1', copy.deepcopy(BASE_VANILLA_DESC)),
- ('1.2.1', {
- 'NODES': {
- 'master1': {
- 'count': 1,
- 'node_processes': ['namenode', 'jobtracker']
- },
- 'worker1': {
- 'count': 1,
- 'node_processes': ['datanode', 'tasktracker'],
- 'node_configs': {
- 'HDFS': {
- 'Data Node Heap Size': 1024
- },
- 'MapReduce': {
- 'Task Tracker Heap Size': 1024
- }
- }
- }
- },
- 'cluster_configs': {
- 'HDFS': {
- 'dfs.replication': 1
- },
- 'MapReduce': {
- 'mapred.map.tasks.speculative.execution': False,
- 'mapred.child.java.opts': '-Xmx500m'
- },
- 'general': {
- 'Enable Swift': False
- }
- }
- })
- ]),
- 'hdp': collections.OrderedDict([
- ('2.0.6', {
- 'NODES': {
- 'master1': {
- 'count': 1,
- 'node_processes': ['NAMENODE', 'SECONDARY_NAMENODE',
- 'ZOOKEEPER_SERVER', 'AMBARI_SERVER',
- 'HISTORYSERVER', 'RESOURCEMANAGER',
- 'GANGLIA_SERVER', 'NAGIOS_SERVER',
- 'OOZIE_SERVER']
- },
- 'worker1': {
- 'count': 1,
- 'node_processes': ['HDFS_CLIENT', 'DATANODE',
- 'YARN_CLIENT', 'ZOOKEEPER_CLIENT',
- 'MAPREDUCE2_CLIENT', 'NODEMANAGER',
- 'PIG', 'OOZIE_CLIENT']
- }
- },
- 'cluster_configs': {
- 'HDFS': {
- 'dfs.replication': 1
- }
- }
- })
- ]),
- 'spark': collections.OrderedDict([
- ('1.0.0', copy.deepcopy(BASE_SPARK_DESC)),
- ('1.3.1', copy.deepcopy(BASE_SPARK_DESC))
- ]),
- 'cdh': collections.OrderedDict([
- ('5.4.0', copy.deepcopy(BASE_CDH_DESC)),
- ('5.3.0', copy.deepcopy(BASE_CDH_DESC)),
- ('5', copy.deepcopy(BASE_CDH_DESC))
- ]),
-}
-
-
-class BaseDataProcessingTest(tempest.test.BaseTestCase):
-
- credentials = ['primary']
-
- @classmethod
- def skip_checks(cls):
- super(BaseDataProcessingTest, cls).skip_checks()
- if not CONF.service_available.sahara:
- raise cls.skipException('Sahara support is required')
- cls.default_plugin = cls._get_default_plugin()
-
- @classmethod
- def setup_clients(cls):
- super(BaseDataProcessingTest, cls).setup_clients()
- cls.client = cls.os.data_processing_client
-
- @classmethod
- def resource_setup(cls):
- super(BaseDataProcessingTest, cls).resource_setup()
-
- cls.default_version = cls._get_default_version()
- if cls.default_plugin is not None and cls.default_version is None:
- raise exceptions.InvalidConfiguration(
- message="No known Sahara plugin version was found")
- cls.flavor_ref = CONF.compute.flavor_ref
-
- # add lists for watched resources
- cls._node_group_templates = []
- cls._cluster_templates = []
- cls._data_sources = []
- cls._job_binary_internals = []
- cls._job_binaries = []
- cls._jobs = []
-
- @classmethod
- def resource_cleanup(cls):
- cls.cleanup_resources(getattr(cls, '_cluster_templates', []),
- cls.client.delete_cluster_template)
- cls.cleanup_resources(getattr(cls, '_node_group_templates', []),
- cls.client.delete_node_group_template)
- cls.cleanup_resources(getattr(cls, '_jobs', []), cls.client.delete_job)
- cls.cleanup_resources(getattr(cls, '_job_binaries', []),
- cls.client.delete_job_binary)
- cls.cleanup_resources(getattr(cls, '_job_binary_internals', []),
- cls.client.delete_job_binary_internal)
- cls.cleanup_resources(getattr(cls, '_data_sources', []),
- cls.client.delete_data_source)
- super(BaseDataProcessingTest, cls).resource_cleanup()
-
- @staticmethod
- def cleanup_resources(resource_id_list, method):
- for resource_id in resource_id_list:
- test_utils.call_and_ignore_notfound_exc(method, resource_id)
-
- @classmethod
- def create_node_group_template(cls, name, plugin_name, hadoop_version,
- node_processes, flavor_id,
- node_configs=None, **kwargs):
- """Creates watched node group template with specified params.
-
- It supports passing additional params using kwargs and returns created
- object. All resources created in this method will be automatically
- removed in tearDownClass method.
- """
- resp_body = cls.client.create_node_group_template(name, plugin_name,
- hadoop_version,
- node_processes,
- flavor_id,
- node_configs,
- **kwargs)
- resp_body = resp_body['node_group_template']
- # store id of created node group template
- cls._node_group_templates.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def create_cluster_template(cls, name, plugin_name, hadoop_version,
- node_groups, cluster_configs=None, **kwargs):
- """Creates watched cluster template with specified params.
-
- It supports passing additional params using kwargs and returns created
- object. All resources created in this method will be automatically
- removed in tearDownClass method.
- """
- resp_body = cls.client.create_cluster_template(name, plugin_name,
- hadoop_version,
- node_groups,
- cluster_configs,
- **kwargs)
- resp_body = resp_body['cluster_template']
- # store id of created cluster template
- cls._cluster_templates.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def create_data_source(cls, name, type, url, **kwargs):
- """Creates watched data source with specified params.
-
- It supports passing additional params using kwargs and returns created
- object. All resources created in this method will be automatically
- removed in tearDownClass method.
- """
- resp_body = cls.client.create_data_source(name, type, url, **kwargs)
- resp_body = resp_body['data_source']
- # store id of created data source
- cls._data_sources.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def create_job_binary_internal(cls, name, data):
- """Creates watched job binary internal with specified params.
-
- It returns created object. All resources created in this method will
- be automatically removed in tearDownClass method.
- """
- resp_body = cls.client.create_job_binary_internal(name, data)
- resp_body = resp_body['job_binary_internal']
- # store id of created job binary internal
- cls._job_binary_internals.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def create_job_binary(cls, name, url, extra=None, **kwargs):
- """Creates watched job binary with specified params.
-
- It supports passing additional params using kwargs and returns created
- object. All resources created in this method will be automatically
- removed in tearDownClass method.
- """
- resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
- resp_body = resp_body['job_binary']
- # store id of created job binary
- cls._job_binaries.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def create_job(cls, name, job_type, mains, libs=None, **kwargs):
- """Creates watched job with specified params.
-
- It supports passing additional params using kwargs and returns created
- object. All resources created in this method will be automatically
- removed in tearDownClass method.
- """
- resp_body = cls.client.create_job(name,
- job_type, mains, libs, **kwargs)
- resp_body = resp_body['job']
- # store id of created job
- cls._jobs.append(resp_body['id'])
-
- return resp_body
-
- @classmethod
- def _get_default_plugin(cls):
- """Returns the default plugin used for testing."""
- if len(CONF.data_processing_feature_enabled.plugins) == 0:
- return None
-
- for plugin in CONF.data_processing_feature_enabled.plugins:
- if plugin in DEFAULT_TEMPLATES:
- break
- else:
- plugin = ''
- return plugin
-
- @classmethod
- def _get_default_version(cls):
- """Returns the default plugin version used for testing.
-
- This is gathered separately from the plugin to allow
- the usage of the plugin name in skip_checks. This method is
- instead invoked from resource_setup, which allows API calls
- and exceptions.
- """
- if not cls.default_plugin:
- return None
- plugin = cls.client.get_plugin(cls.default_plugin)['plugin']
-
- for version in DEFAULT_TEMPLATES[cls.default_plugin].keys():
- if version in plugin['versions']:
- break
- else:
- version = None
-
- return version
-
- @classmethod
- def get_node_group_template(cls, nodegroup='worker1'):
- """Returns a node group template for the default plugin."""
- try:
- plugin_data = (
- DEFAULT_TEMPLATES[cls.default_plugin][cls.default_version]
- )
- nodegroup_data = plugin_data['NODES'][nodegroup]
- node_group_template = {
- 'description': 'Test node group template',
- 'plugin_name': cls.default_plugin,
- 'hadoop_version': cls.default_version,
- 'node_processes': nodegroup_data['node_processes'],
- 'flavor_id': cls.flavor_ref,
- 'node_configs': nodegroup_data.get('node_configs', {}),
- }
- return node_group_template
- except (IndexError, KeyError):
- return None
-
- @classmethod
- def get_cluster_template(cls, node_group_template_ids=None):
- """Returns a cluster template for the default plugin.
-
- node_group_template_ids maps node group names to IDs of pre-defined
- node group templates that have to be used in the cluster template
- (instead of dynamically defining them with 'node_processes').
- """
- if node_group_template_ids is None:
- node_group_template_ids = {}
- try:
- plugin_data = (
- DEFAULT_TEMPLATES[cls.default_plugin][cls.default_version]
- )
-
- all_node_groups = []
- for ng_name, ng_data in six.iteritems(plugin_data['NODES']):
- node_group = {
- 'name': '%s-node' % (ng_name),
- 'flavor_id': cls.flavor_ref,
- 'count': ng_data['count']
- }
- if ng_name in node_group_template_ids.keys():
- # node group already defined, use it
- node_group['node_group_template_id'] = (
- node_group_template_ids[ng_name]
- )
- else:
- # node_processes list defined on-the-fly
- node_group['node_processes'] = ng_data['node_processes']
- if 'node_configs' in ng_data:
- node_group['node_configs'] = ng_data['node_configs']
- all_node_groups.append(node_group)
-
- cluster_template = {
- 'description': 'Test cluster template',
- 'plugin_name': cls.default_plugin,
- 'hadoop_version': cls.default_version,
- 'cluster_configs': plugin_data.get('cluster_configs', {}),
- 'node_groups': all_node_groups,
- }
- return cluster_template
- except (IndexError, KeyError):
- return None
diff --git a/tempest/api/data_processing/test_cluster_templates.py b/tempest/api/data_processing/test_cluster_templates.py
deleted file mode 100644
index dfd8e27..0000000
--- a/tempest/api/data_processing/test_cluster_templates.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import exceptions
-from tempest import test
-
-
-class ClusterTemplateTest(dp_base.BaseDataProcessingTest):
- # Link to the API documentation: http://docs.openstack.org/developer/
- # sahara/restapi/rest_api_v1.0.html#cluster-templates
-
- @classmethod
- def skip_checks(cls):
- super(ClusterTemplateTest, cls).skip_checks()
- if cls.default_plugin is None:
- raise cls.skipException("No Sahara plugins configured")
-
- @classmethod
- def resource_setup(cls):
- super(ClusterTemplateTest, cls).resource_setup()
-
- # pre-define a node group template
- node_group_template_w = cls.get_node_group_template('worker1')
- if node_group_template_w is None:
- raise exceptions.InvalidConfiguration(
- message="No known Sahara plugin was found")
-
- node_group_template_w['name'] = data_utils.rand_name(
- 'sahara-ng-template')
- resp_body = cls.create_node_group_template(**node_group_template_w)
- node_group_template_id = resp_body['id']
- configured_node_group_templates = {'worker1': node_group_template_id}
-
- cls.full_cluster_template = cls.get_cluster_template(
- configured_node_group_templates)
-
- # create cls.cluster_template variable to use for comparison to cluster
- # template response body. The 'node_groups' field in the response body
- # has some extra info that the post body does not have. The 'node_groups'
- # field in the response body is something like this
- #
- # 'node_groups': [
- # {
- # 'count': 3,
- # 'name': 'worker-node',
- # 'volume_mount_prefix': '/volumes/disk',
- # 'created_at': '2014-05-21 14:31:37',
- # 'updated_at': None,
- # 'floating_ip_pool': None,
- # ...
- # },
- # ...
- # ]
- cls.cluster_template = cls.full_cluster_template.copy()
- del cls.cluster_template['node_groups']
-
- def _create_cluster_template(self, template_name=None):
- """Creates Cluster Template with optional name specified.
-
- It creates a template and verifies the template name and response
- body. Returns the id and name of the created template.
- """
- if not template_name:
- # generate random name if it's not specified
- template_name = data_utils.rand_name('sahara-cluster-template')
-
- # create cluster template
- resp_body = self.create_cluster_template(template_name,
- **self.full_cluster_template)
-
- # ensure that template created successfully
- self.assertEqual(template_name, resp_body['name'])
- self.assertDictContainsSubset(self.cluster_template, resp_body)
-
- return resp_body['id'], template_name
-
- @test.attr(type='smoke')
- @test.idempotent_id('3525f1f1-3f9c-407d-891a-a996237e728b')
- def test_cluster_template_create(self):
- self._create_cluster_template()
-
- @test.attr(type='smoke')
- @test.idempotent_id('7a161882-e430-4840-a1c6-1d928201fab2')
- def test_cluster_template_list(self):
- template_info = self._create_cluster_template()
-
- # check for cluster template in list
- templates = self.client.list_cluster_templates()['cluster_templates']
- templates_info = [(template['id'], template['name'])
- for template in templates]
- self.assertIn(template_info, templates_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('2b75fe22-f731-4b0f-84f1-89ab25f86637')
- def test_cluster_template_get(self):
- template_id, template_name = self._create_cluster_template()
-
- # check cluster template fetch by id
- template = self.client.get_cluster_template(template_id)
- template = template['cluster_template']
- self.assertEqual(template_name, template['name'])
- self.assertDictContainsSubset(self.cluster_template, template)
-
- @test.attr(type='smoke')
- @test.idempotent_id('ff1fd989-171c-4dd7-91fd-9fbc71b09675')
- def test_cluster_template_delete(self):
- template_id, _ = self._create_cluster_template()
-
- # delete the cluster template by id
- self.client.delete_cluster_template(template_id)
- # TODO(ylobankov): check that cluster template is really deleted
diff --git a/tempest/api/data_processing/test_data_sources.py b/tempest/api/data_processing/test_data_sources.py
deleted file mode 100644
index 67d09a0..0000000
--- a/tempest/api/data_processing/test_data_sources.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import test
-
-
-class DataSourceTest(dp_base.BaseDataProcessingTest):
- @classmethod
- def resource_setup(cls):
- super(DataSourceTest, cls).resource_setup()
- cls.swift_data_source_with_creds = {
- 'url': 'swift://sahara-container.sahara/input-source',
- 'description': 'Test data source',
- 'credentials': {
- 'user': cls.os.credentials.username,
- 'password': cls.os.credentials.password
- },
- 'type': 'swift'
- }
- cls.swift_data_source = cls.swift_data_source_with_creds.copy()
- del cls.swift_data_source['credentials']
-
- cls.local_hdfs_data_source = {
- 'url': 'input-source',
- 'description': 'Test data source',
- 'type': 'hdfs'
- }
-
- cls.external_hdfs_data_source = {
- 'url': 'hdfs://172.18.168.2:8020/usr/hadoop/input-source',
- 'description': 'Test data source',
- 'type': 'hdfs'
- }
-
- def _create_data_source(self, source_body, source_name=None):
- """Creates Data Source with optional name specified.
-
- It creates a link to an input-source file (which may not exist) and
- verifies the source name and response body. Returns the id and name of the created source.
- """
- if not source_name:
- # generate random name if it's not specified
- source_name = data_utils.rand_name('sahara-data-source')
-
- # create data source
- resp_body = self.create_data_source(source_name, **source_body)
-
- # ensure that source created successfully
- self.assertEqual(source_name, resp_body['name'])
- if source_body['type'] == 'swift':
- source_body = self.swift_data_source
- self.assertDictContainsSubset(source_body, resp_body)
-
- return resp_body['id'], source_name
-
- def _list_data_sources(self, source_info):
- # check for data source in list
- sources = self.client.list_data_sources()['data_sources']
- sources_info = [(source['id'], source['name']) for source in sources]
- self.assertIn(source_info, sources_info)
-
- def _get_data_source(self, source_id, source_name, source_body):
- # check data source fetch by id
- source = self.client.get_data_source(source_id)['data_source']
- self.assertEqual(source_name, source['name'])
- self.assertDictContainsSubset(source_body, source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('9e0e836d-c372-4fca-91b7-b66c3e9646c8')
- def test_swift_data_source_create(self):
- self._create_data_source(self.swift_data_source_with_creds)
-
- @test.attr(type='smoke')
- @test.idempotent_id('3cb87a4a-0534-4b97-9edc-8bbc822b68a0')
- def test_swift_data_source_list(self):
- source_info = (
- self._create_data_source(self.swift_data_source_with_creds))
- self._list_data_sources(source_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('fc07409b-6477-4cb3-9168-e633c46b227f')
- def test_swift_data_source_get(self):
- source_id, source_name = (
- self._create_data_source(self.swift_data_source_with_creds))
- self._get_data_source(source_id, source_name, self.swift_data_source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('df53669c-0cd1-4cf7-b408-4cf215d8beb8')
- def test_swift_data_source_delete(self):
- source_id, _ = (
- self._create_data_source(self.swift_data_source_with_creds))
-
- # delete the data source by id
- self.client.delete_data_source(source_id)
-
- @test.attr(type='smoke')
- @test.idempotent_id('88505d52-db01-4229-8f1d-a1137da5fe2d')
- def test_local_hdfs_data_source_create(self):
- self._create_data_source(self.local_hdfs_data_source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('81d7d42a-d7f6-4d9b-b38c-0801a4dfe3c2')
- def test_local_hdfs_data_source_list(self):
- source_info = self._create_data_source(self.local_hdfs_data_source)
- self._list_data_sources(source_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('ec0144c6-db1e-4169-bb06-7abae14a8443')
- def test_local_hdfs_data_source_get(self):
- source_id, source_name = (
- self._create_data_source(self.local_hdfs_data_source))
- self._get_data_source(
- source_id, source_name, self.local_hdfs_data_source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('e398308b-4230-4f86-ba10-9b0b60a59c8d')
- def test_local_hdfs_data_source_delete(self):
- source_id, _ = self._create_data_source(self.local_hdfs_data_source)
-
- # delete the data source by id
- self.client.delete_data_source(source_id)
-
- @test.attr(type='smoke')
- @test.idempotent_id('bfd91128-e642-4d95-a973-3e536962180c')
- def test_external_hdfs_data_source_create(self):
- self._create_data_source(self.external_hdfs_data_source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('92e2be72-f7ab-499d-ae01-fb9943c90d8e')
- def test_external_hdfs_data_source_list(self):
- source_info = self._create_data_source(self.external_hdfs_data_source)
- self._list_data_sources(source_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('a31edb1b-6bc6-4f42-871f-70cd243184ac')
- def test_external_hdfs_data_source_get(self):
- source_id, source_name = (
- self._create_data_source(self.external_hdfs_data_source))
- self._get_data_source(
- source_id, source_name, self.external_hdfs_data_source)
-
- @test.attr(type='smoke')
- @test.idempotent_id('295924cd-a085-4b45-aea8-0707cdb2da7e')
- def test_external_hdfs_data_source_delete(self):
- source_id, _ = self._create_data_source(self.external_hdfs_data_source)
-
- # delete the data source by id
- self.client.delete_data_source(source_id)
diff --git a/tempest/api/data_processing/test_job_binaries.py b/tempest/api/data_processing/test_job_binaries.py
deleted file mode 100644
index a47ddbc..0000000
--- a/tempest/api/data_processing/test_job_binaries.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import test
-
-
-class JobBinaryTest(dp_base.BaseDataProcessingTest):
- # Link to the API documentation: http://docs.openstack.org/developer/
- # sahara/restapi/rest_api_v1.1_EDP.html#job-binaries
-
- @classmethod
- def resource_setup(cls):
- super(JobBinaryTest, cls).resource_setup()
- cls.swift_job_binary_with_extra = {
- 'url': 'swift://sahara-container.sahara/example.jar',
- 'description': 'Test job binary',
- 'extra': {
- 'user': cls.os.credentials.username,
- 'password': cls.os.credentials.password
- }
- }
- # Create extra cls.swift_job_binary variable to use for comparison to
- # job binary response body because response body has no 'extra' field.
- cls.swift_job_binary = cls.swift_job_binary_with_extra.copy()
- del cls.swift_job_binary['extra']
-
- name = data_utils.rand_name('sahara-internal-job-binary')
- cls.job_binary_data = 'Some script may be data'
- job_binary_internal = (
- cls.create_job_binary_internal(name, cls.job_binary_data))
- cls.internal_db_job_binary = {
- 'url': 'internal-db://%s' % job_binary_internal['id'],
- 'description': 'Test job binary',
- }
-
- def _create_job_binary(self, binary_body, binary_name=None):
- """Creates Job Binary with optional name specified.
-
- It creates a link to data (jar, pig files, etc.) and verifies the
- job binary name and response body. Returns the id and name of the
- created job binary. The data may not exist when Swift is used as the
- data storage; in other cases the data must exist in storage.
- """
- if not binary_name:
- # generate random name if it's not specified
- binary_name = data_utils.rand_name('sahara-job-binary')
-
- # create job binary
- resp_body = self.create_job_binary(binary_name, **binary_body)
-
- # ensure that binary created successfully
- self.assertEqual(binary_name, resp_body['name'])
- if 'swift' in binary_body['url']:
- binary_body = self.swift_job_binary
- self.assertDictContainsSubset(binary_body, resp_body)
-
- return resp_body['id'], binary_name
-
- @test.attr(type='smoke')
- @test.idempotent_id('c00d43f8-4360-45f8-b280-af1a201b12d3')
- def test_swift_job_binary_create(self):
- self._create_job_binary(self.swift_job_binary_with_extra)
-
- @test.attr(type='smoke')
- @test.idempotent_id('f8809352-e79d-4748-9359-ce1efce89f2a')
- def test_swift_job_binary_list(self):
- binary_info = self._create_job_binary(self.swift_job_binary_with_extra)
-
- # check for job binary in list
- binaries = self.client.list_job_binaries()['binaries']
- binaries_info = [(binary['id'], binary['name']) for binary in binaries]
- self.assertIn(binary_info, binaries_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('2d4a670f-e8f1-413c-b5ac-50c1bfe9e1b1')
- def test_swift_job_binary_get(self):
- binary_id, binary_name = (
- self._create_job_binary(self.swift_job_binary_with_extra))
-
- # check job binary fetch by id
- binary = self.client.get_job_binary(binary_id)['job_binary']
- self.assertEqual(binary_name, binary['name'])
- self.assertDictContainsSubset(self.swift_job_binary, binary)
-
- @test.attr(type='smoke')
- @test.idempotent_id('9b0e8f38-04f3-4616-b399-cfa7eb2677ed')
- def test_swift_job_binary_delete(self):
- binary_id, _ = (
- self._create_job_binary(self.swift_job_binary_with_extra))
-
- # delete the job binary by id
- self.client.delete_job_binary(binary_id)
-
- @test.attr(type='smoke')
- @test.idempotent_id('63662f6d-8291-407e-a6fc-f654522ebab6')
- def test_internal_db_job_binary_create(self):
- self._create_job_binary(self.internal_db_job_binary)
-
- @test.attr(type='smoke')
- @test.idempotent_id('38731e7b-6d9d-4ffa-8fd1-193c453e88b1')
- def test_internal_db_job_binary_list(self):
- binary_info = self._create_job_binary(self.internal_db_job_binary)
-
- # check for job binary in list
- binaries = self.client.list_job_binaries()['binaries']
- binaries_info = [(binary['id'], binary['name']) for binary in binaries]
- self.assertIn(binary_info, binaries_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('1b32199b-c3f5-43e1-a37a-3797e57b7066')
- def test_internal_db_job_binary_get(self):
- binary_id, binary_name = (
- self._create_job_binary(self.internal_db_job_binary))
-
- # check job binary fetch by id
- binary = self.client.get_job_binary(binary_id)['job_binary']
- self.assertEqual(binary_name, binary['name'])
- self.assertDictContainsSubset(self.internal_db_job_binary, binary)
-
- @test.attr(type='smoke')
- @test.idempotent_id('3c42b0c3-3e03-46a5-adf0-df0650271a4e')
- def test_internal_db_job_binary_delete(self):
- binary_id, _ = self._create_job_binary(self.internal_db_job_binary)
-
- # delete the job binary by id
- self.client.delete_job_binary(binary_id)
-
- @test.attr(type='smoke')
- @test.idempotent_id('d5d47659-7e2c-4ea7-b292-5b3e559e8587')
- def test_job_binary_get_data(self):
- binary_id, _ = self._create_job_binary(self.internal_db_job_binary)
-
- # get data of job binary by id
- _, data = self.client.get_job_binary_data(binary_id)
- self.assertEqual(data, self.job_binary_data)
diff --git a/tempest/api/data_processing/test_job_binary_internals.py b/tempest/api/data_processing/test_job_binary_internals.py
deleted file mode 100644
index b4f0769..0000000
--- a/tempest/api/data_processing/test_job_binary_internals.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import test
-
-
-class JobBinaryInternalTest(dp_base.BaseDataProcessingTest):
- # Link to the API documentation: http://docs.openstack.org/developer/
- # sahara/restapi/rest_api_v1.1_EDP.html#job-binary-internals
-
- @classmethod
- def resource_setup(cls):
- super(JobBinaryInternalTest, cls).resource_setup()
- cls.job_binary_internal_data = 'Some script may be data'
-
- def _create_job_binary_internal(self, binary_name=None):
- """Creates Job Binary Internal with optional name specified.
-
- It puts data into the Sahara database and verifies the job binary
- internal name. Returns the id and name of the created job binary internal.
- """
- if not binary_name:
- # generate random name if it's not specified
- binary_name = data_utils.rand_name('sahara-job-binary-internal')
-
- # create job binary internal
- resp_body = (
- self.create_job_binary_internal(binary_name,
- self.job_binary_internal_data))
-
- # ensure that job binary internal created successfully
- self.assertEqual(binary_name, resp_body['name'])
-
- return resp_body['id'], binary_name
-
- @test.attr(type='smoke')
- @test.idempotent_id('249c4dc2-946f-4939-83e6-212ddb6ea0be')
- def test_job_binary_internal_create(self):
- self._create_job_binary_internal()
-
- @test.attr(type='smoke')
- @test.idempotent_id('1e3c2ecd-5673-499d-babe-4fe2fcdf64ee')
- def test_job_binary_internal_list(self):
- binary_info = self._create_job_binary_internal()
-
- # check for job binary internal in list
- binaries = self.client.list_job_binary_internals()['binaries']
- binaries_info = [(binary['id'], binary['name']) for binary in binaries]
- self.assertIn(binary_info, binaries_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('a2046a53-386c-43ab-be35-df54b19db776')
- def test_job_binary_internal_get(self):
- binary_id, binary_name = self._create_job_binary_internal()
-
- # check job binary internal fetch by id
- binary = self.client.get_job_binary_internal(binary_id)
- self.assertEqual(binary_name, binary['job_binary_internal']['name'])
-
- @test.attr(type='smoke')
- @test.idempotent_id('b3568c33-4eed-40d5-aae4-6ff3b2ac58f5')
- def test_job_binary_internal_delete(self):
- binary_id, _ = self._create_job_binary_internal()
-
- # delete the job binary internal by id
- self.client.delete_job_binary_internal(binary_id)
-
- @test.attr(type='smoke')
- @test.idempotent_id('8871f2b0-5782-4d66-9bb9-6f95bcb839ea')
- def test_job_binary_internal_get_data(self):
- binary_id, _ = self._create_job_binary_internal()
-
- # get data of job binary internal by id
- _, data = self.client.get_job_binary_internal_data(binary_id)
- self.assertEqual(data, self.job_binary_internal_data)
diff --git a/tempest/api/data_processing/test_jobs.py b/tempest/api/data_processing/test_jobs.py
deleted file mode 100644
index 8503320..0000000
--- a/tempest/api/data_processing/test_jobs.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import test
-
-
-class JobTest(dp_base.BaseDataProcessingTest):
- # NOTE: Link to the API documentation: http://docs.openstack.org/developer/
- # sahara/restapi/rest_api_v1.1_EDP.html#jobs
-
- @classmethod
- def resource_setup(cls):
- super(JobTest, cls).resource_setup()
- # create job binary
- job_binary = {
- 'name': data_utils.rand_name('sahara-job-binary'),
- 'url': 'swift://sahara-container.sahara/example.jar',
- 'description': 'Test job binary',
- 'extra': {
- 'user': cls.os.credentials.username,
- 'password': cls.os.credentials.password
- }
- }
- resp_body = cls.create_job_binary(**job_binary)
- job_binary_id = resp_body['id']
-
- cls.job = {
- 'job_type': 'Pig',
- 'mains': [job_binary_id]
- }
-
- def _create_job(self, job_name=None):
- """Creates Job with optional name specified.
-
- It creates a job and verifies the job name. Returns the id and name
- of the created job.
- """
- if not job_name:
- # generate random name if it's not specified
- job_name = data_utils.rand_name('sahara-job')
-
- # create job
- resp_body = self.create_job(job_name, **self.job)
-
- # ensure that job created successfully
- self.assertEqual(job_name, resp_body['name'])
-
- return resp_body['id'], job_name
-
- @test.attr(type='smoke')
- @test.idempotent_id('8cf785ca-adf4-473d-8281-fb9a5efa3073')
- def test_job_create(self):
- self._create_job()
-
- @test.attr(type='smoke')
- @test.idempotent_id('41e253fe-b02a-41a0-b186-5ff1f0463ba3')
- def test_job_list(self):
- job_info = self._create_job()
-
- # check for job in list
- jobs = self.client.list_jobs()['jobs']
- jobs_info = [(job['id'], job['name']) for job in jobs]
- self.assertIn(job_info, jobs_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('3faf17fa-bc94-4a60-b1c3-79e53674c16c')
- def test_job_get(self):
- job_id, job_name = self._create_job()
-
- # check job fetch by id
- job = self.client.get_job(job_id)['job']
- self.assertEqual(job_name, job['name'])
-
- @test.attr(type='smoke')
- @test.idempotent_id('dff85e62-7dda-4ad8-b1ee-850adecb0c6e')
- def test_job_delete(self):
- job_id, _ = self._create_job()
-
- # delete the job by id
- self.client.delete_job(job_id)
diff --git a/tempest/api/data_processing/test_node_group_templates.py b/tempest/api/data_processing/test_node_group_templates.py
deleted file mode 100644
index c2dae85..0000000
--- a/tempest/api/data_processing/test_node_group_templates.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest.common.utils import data_utils
-from tempest import test
-
-
-class NodeGroupTemplateTest(dp_base.BaseDataProcessingTest):
-
- @classmethod
- def skip_checks(cls):
- super(NodeGroupTemplateTest, cls).skip_checks()
- if cls.default_plugin is None:
- raise cls.skipException("No Sahara plugins configured")
-
- def _create_node_group_template(self, template_name=None):
- """Creates Node Group Template with optional name specified.
-
- It creates a template and verifies the template name and response
- body. Returns the id and name of the created template.
- """
- self.node_group_template = self.get_node_group_template()
- self.assertIsNotNone(self.node_group_template,
- "No known Sahara plugin was found")
-
- if not template_name:
- # generate random name if it's not specified
- template_name = data_utils.rand_name('sahara-ng-template')
-
- # create node group template
- resp_body = self.create_node_group_template(template_name,
- **self.node_group_template)
-
- # ensure that template created successfully
- self.assertEqual(template_name, resp_body['name'])
- self.assertDictContainsSubset(self.node_group_template, resp_body)
-
- return resp_body['id'], template_name
-
- @test.attr(type='smoke')
- @test.idempotent_id('63164051-e46d-4387-9741-302ef4791cbd')
- def test_node_group_template_create(self):
- self._create_node_group_template()
-
- @test.attr(type='smoke')
- @test.idempotent_id('eb39801d-2612-45e5-88b1-b5d70b329185')
- def test_node_group_template_list(self):
- template_info = self._create_node_group_template()
-
- # check for node group template in list
- templates = self.client.list_node_group_templates()
- templates = templates['node_group_templates']
- templates_info = [(template['id'], template['name'])
- for template in templates]
- self.assertIn(template_info, templates_info)
-
- @test.attr(type='smoke')
- @test.idempotent_id('6ee31539-a708-466f-9c26-4093ce09a836')
- def test_node_group_template_get(self):
- template_id, template_name = self._create_node_group_template()
-
- # check node group template fetch by id
- template = self.client.get_node_group_template(template_id)
- template = template['node_group_template']
- self.assertEqual(template_name, template['name'])
- self.assertDictContainsSubset(self.node_group_template, template)
-
- @test.attr(type='smoke')
- @test.idempotent_id('f4f5cb82-708d-4031-81c4-b0618a706a2f')
- def test_node_group_template_delete(self):
- template_id, _ = self._create_node_group_template()
-
- # delete the node group template by id
- self.client.delete_node_group_template(template_id)
diff --git a/tempest/api/data_processing/test_plugins.py b/tempest/api/data_processing/test_plugins.py
deleted file mode 100644
index 14594e4..0000000
--- a/tempest/api/data_processing/test_plugins.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.api.data_processing import base as dp_base
-from tempest import config
-from tempest import test
-
-CONF = config.CONF
-
-
-class PluginsTest(dp_base.BaseDataProcessingTest):
- def _list_all_plugin_names(self):
- """Returns all enabled plugin names.
-
- It verifies that the main plugins are available.
- """
- plugins = self.client.list_plugins()['plugins']
- plugins_names = [plugin['name'] for plugin in plugins]
- for enabled_plugin in CONF.data_processing_feature_enabled.plugins:
- self.assertIn(enabled_plugin, plugins_names)
-
- return plugins_names
-
- @test.attr(type='smoke')
- @test.idempotent_id('01a005a3-426c-4c0b-9617-d09475403e09')
- def test_plugin_list(self):
- self._list_all_plugin_names()
-
- @test.attr(type='smoke')
- @test.idempotent_id('53cf6487-2cfb-4a6f-8671-97c542c6e901')
- def test_plugin_get(self):
- for plugin_name in self._list_all_plugin_names():
- plugin = self.client.get_plugin(plugin_name)['plugin']
- self.assertEqual(plugin_name, plugin['name'])
-
- for plugin_version in plugin['versions']:
- detailed_plugin = self.client.get_plugin(plugin_name,
- plugin_version)
- detailed_plugin = detailed_plugin['plugin']
- self.assertEqual(plugin_name, detailed_plugin['name'])
-
- # check that required image tags contains name and version
- image_tags = detailed_plugin['required_image_tags']
- self.assertIn(plugin_name, image_tags)
- self.assertIn(plugin_version, image_tags)
diff --git a/tempest/clients.py b/tempest/clients.py
index 765a526..6cb6980 100644
--- a/tempest/clients.py
+++ b/tempest/clients.py
@@ -24,7 +24,6 @@
from tempest.lib import exceptions as lib_exc
from tempest.lib.services import clients
from tempest.services import baremetal
-from tempest.services import data_processing
from tempest.services import identity
from tempest.services import object_storage
from tempest.services import orchestration
@@ -39,7 +38,7 @@
default_params = config.service_client_config()
- # TODO(andreaf) This is only used by data_processing and baremetal clients,
+ # TODO(andreaf) This is only used by baremetal clients,
# and should be removed once they are out of Tempest
default_params_with_timeout_values = {
'build_interval': CONF.compute.build_interval,
@@ -84,12 +83,6 @@
build_interval=CONF.orchestration.build_interval,
build_timeout=CONF.orchestration.build_timeout,
**self.default_params)
- self.data_processing_client = data_processing.DataProcessingClient(
- self.auth_provider,
- CONF.data_processing.catalog_type,
- CONF.identity.region,
- endpoint_type=CONF.data_processing.endpoint_type,
- **self.default_params_with_timeout_values)
self.negative_client = negative_rest_client.NegativeRestClient(
self.auth_provider, service, **self.default_params)
diff --git a/tempest/cmd/verify_tempest_config.py b/tempest/cmd/verify_tempest_config.py
index b2e72c5..381f3df 100644
--- a/tempest/cmd/verify_tempest_config.py
+++ b/tempest/cmd/verify_tempest_config.py
@@ -286,7 +286,6 @@
'object_storage': 'swift',
'compute': 'nova',
'orchestration': 'heat',
- 'data_processing': 'sahara',
'baremetal': 'ironic',
'identity': 'keystone',
}
diff --git a/tempest/config.py b/tempest/config.py
index eeafea6..952e82a 100644
--- a/tempest/config.py
+++ b/tempest/config.py
@@ -886,34 +886,6 @@
help="Value must match heat configuration of the same name."),
]
-data_processing_group = cfg.OptGroup(name="data-processing",
- title="Data Processing options")
-
-DataProcessingGroup = [
- cfg.StrOpt('catalog_type',
- default='data-processing',
- deprecated_group="data_processing",
- help="Catalog type of the data processing service."),
- cfg.StrOpt('endpoint_type',
- default='publicURL',
- choices=['public', 'admin', 'internal',
- 'publicURL', 'adminURL', 'internalURL'],
- deprecated_group="data_processing",
- help="The endpoint type to use for the data processing "
- "service."),
-]
-
-
-data_processing_feature_group = cfg.OptGroup(
- name="data-processing-feature-enabled",
- title="Enabled Data Processing features")
-
-DataProcessingFeaturesGroup = [
- cfg.ListOpt('plugins',
- default=["vanilla", "cdh"],
- deprecated_group="data_processing-feature-enabled",
- help="List of enabled data processing plugins")
-]
stress_group = cfg.OptGroup(name='stress', title='Stress Test Options')
@@ -1160,8 +1132,6 @@
(object_storage_group, ObjectStoreGroup),
(object_storage_feature_group, ObjectStoreFeaturesGroup),
(orchestration_group, OrchestrationGroup),
- (data_processing_group, DataProcessingGroup),
- (data_processing_feature_group, DataProcessingFeaturesGroup),
(stress_group, StressGroup),
(scenario_group, ScenarioGroup),
(service_available_group, ServiceAvailableGroup),
@@ -1227,9 +1197,6 @@
self.object_storage_feature_enabled = _CONF[
'object-storage-feature-enabled']
self.orchestration = _CONF.orchestration
- self.data_processing = _CONF['data-processing']
- self.data_processing_feature_enabled = _CONF[
- 'data-processing-feature-enabled']
self.stress = _CONF.stress
self.scenario = _CONF.scenario
self.service_available = _CONF.service_available
diff --git a/tempest/services/data_processing/__init__.py b/tempest/services/data_processing/__init__.py
deleted file mode 100644
index c49bc5c..0000000
--- a/tempest/services/data_processing/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-from tempest.services.data_processing.v1_1.data_processing_client import \
- DataProcessingClient
-
-__all__ = ['DataProcessingClient']
diff --git a/tempest/services/data_processing/v1_1/__init__.py b/tempest/services/data_processing/v1_1/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tempest/services/data_processing/v1_1/__init__.py
+++ /dev/null
diff --git a/tempest/services/data_processing/v1_1/data_processing_client.py b/tempest/services/data_processing/v1_1/data_processing_client.py
deleted file mode 100644
index c74672f..0000000
--- a/tempest/services/data_processing/v1_1/data_processing_client.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# Copyright (c) 2013 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_serialization import jsonutils as json
-
-from tempest.lib.common import rest_client
-
-
-class DataProcessingClient(rest_client.RestClient):
-
- def _request_and_check_resp(self, request_func, uri, resp_status):
- """Make a request and check response status code.
-
- It returns a ResponseBody.
- """
- resp, body = request_func(uri)
- self.expected_success(resp_status, resp.status)
- return rest_client.ResponseBody(resp, body)
-
- def _request_and_check_resp_data(self, request_func, uri, resp_status):
- """Make a request and check response status code.
-
- It returns a pair: the resp object and the response data.
- """
- resp, body = request_func(uri)
- self.expected_success(resp_status, resp.status)
- return resp, body
-
- def _request_check_and_parse_resp(self, request_func, uri,
- resp_status, *args, **kwargs):
- """Make a request, check response status code and parse response body.
-
- It returns a ResponseBody.
- """
- headers = {'Content-Type': 'application/json'}
- resp, body = request_func(uri, headers=headers, *args, **kwargs)
- self.expected_success(resp_status, resp.status)
- body = json.loads(body)
- return rest_client.ResponseBody(resp, body)
-
- def list_node_group_templates(self):
- """List all node group templates for a user."""
-
- uri = 'node-group-templates'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_node_group_template(self, tmpl_id):
- """Returns the details of a single node group template."""
-
- uri = 'node-group-templates/%s' % tmpl_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_node_group_template(self, name, plugin_name, hadoop_version,
- node_processes, flavor_id,
- node_configs=None, **kwargs):
- """Creates node group template with specified params.
-
- It supports passing additional params using kwargs and returns created
- object.
- """
- uri = 'node-group-templates'
- body = kwargs.copy()
- body.update({
- 'name': name,
- 'plugin_name': plugin_name,
- 'hadoop_version': hadoop_version,
- 'node_processes': node_processes,
- 'flavor_id': flavor_id,
- 'node_configs': node_configs or dict(),
- })
- return self._request_check_and_parse_resp(self.post, uri, 202,
- body=json.dumps(body))
-
- def delete_node_group_template(self, tmpl_id):
- """Deletes the specified node group template by id."""
-
- uri = 'node-group-templates/%s' % tmpl_id
- return self._request_and_check_resp(self.delete, uri, 204)
-
- def list_plugins(self):
- """List all enabled plugins."""
-
- uri = 'plugins'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_plugin(self, plugin_name, plugin_version=None):
- """Returns the details of a single plugin."""
-
- uri = 'plugins/%s' % plugin_name
- if plugin_version:
- uri += '/%s' % plugin_version
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def list_cluster_templates(self):
- """List all cluster templates for a user."""
-
- uri = 'cluster-templates'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_cluster_template(self, tmpl_id):
- """Returns the details of a single cluster template."""
-
- uri = 'cluster-templates/%s' % tmpl_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_cluster_template(self, name, plugin_name, hadoop_version,
- node_groups, cluster_configs=None,
- **kwargs):
- """Creates cluster template with specified params.
-
- It supports passing additional params using kwargs and returns created
- object.
- """
- uri = 'cluster-templates'
- body = kwargs.copy()
- body.update({
- 'name': name,
- 'plugin_name': plugin_name,
- 'hadoop_version': hadoop_version,
- 'node_groups': node_groups,
- 'cluster_configs': cluster_configs or dict(),
- })
- return self._request_check_and_parse_resp(self.post, uri, 202,
- body=json.dumps(body))
-
- def delete_cluster_template(self, tmpl_id):
- """Deletes the specified cluster template by id."""
-
- uri = 'cluster-templates/%s' % tmpl_id
- return self._request_and_check_resp(self.delete, uri, 204)
-
- def list_data_sources(self):
- """List all data sources for a user."""
-
- uri = 'data-sources'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_data_source(self, source_id):
- """Returns the details of a single data source."""
-
- uri = 'data-sources/%s' % source_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_data_source(self, name, data_source_type, url, **kwargs):
- """Creates data source with specified params.
-
- It supports passing additional params using kwargs and returns created
- object.
- """
- uri = 'data-sources'
- body = kwargs.copy()
- body.update({
- 'name': name,
- 'type': data_source_type,
- 'url': url
- })
- return self._request_check_and_parse_resp(self.post, uri,
- 202, body=json.dumps(body))
-
- def delete_data_source(self, source_id):
- """Deletes the specified data source by id."""
-
- uri = 'data-sources/%s' % source_id
- return self._request_and_check_resp(self.delete, uri, 204)
-
- def list_job_binary_internals(self):
- """List all job binary internals for a user."""
-
- uri = 'job-binary-internals'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_job_binary_internal(self, job_binary_id):
- """Returns the details of a single job binary internal."""
-
- uri = 'job-binary-internals/%s' % job_binary_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_job_binary_internal(self, name, data):
- """Creates job binary internal with specified params."""
-
- uri = 'job-binary-internals/%s' % name
- return self._request_check_and_parse_resp(self.put, uri, 202, data)
-
- def delete_job_binary_internal(self, job_binary_id):
- """Deletes the specified job binary internal by id."""
-
- uri = 'job-binary-internals/%s' % job_binary_id
- return self._request_and_check_resp(self.delete, uri, 204)
-
- def get_job_binary_internal_data(self, job_binary_id):
- """Returns data of a single job binary internal."""
-
- uri = 'job-binary-internals/%s/data' % job_binary_id
- return self._request_and_check_resp_data(self.get, uri, 200)
-
- def list_job_binaries(self):
- """List all job binaries for a user."""
-
- uri = 'job-binaries'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_job_binary(self, job_binary_id):
- """Returns the details of a single job binary."""
-
- uri = 'job-binaries/%s' % job_binary_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_job_binary(self, name, url, extra=None, **kwargs):
- """Creates job binary with specified params.
-
- It supports passing additional params using kwargs and returns created
- object.
- """
- uri = 'job-binaries'
- body = kwargs.copy()
- body.update({
- 'name': name,
- 'url': url,
- 'extra': extra or dict(),
- })
- return self._request_check_and_parse_resp(self.post, uri,
- 202, body=json.dumps(body))
-
- def delete_job_binary(self, job_binary_id):
- """Deletes the specified job binary by id."""
-
- uri = 'job-binaries/%s' % job_binary_id
- return self._request_and_check_resp(self.delete, uri, 204)
-
- def get_job_binary_data(self, job_binary_id):
- """Returns data of a single job binary."""
-
- uri = 'job-binaries/%s/data' % job_binary_id
- return self._request_and_check_resp_data(self.get, uri, 200)
-
- def list_jobs(self):
- """List all jobs for a user."""
-
- uri = 'jobs'
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def get_job(self, job_id):
- """Returns the details of a single job."""
-
- uri = 'jobs/%s' % job_id
- return self._request_check_and_parse_resp(self.get, uri, 200)
-
- def create_job(self, name, job_type, mains, libs=None, **kwargs):
- """Creates job with specified params.
-
- It supports passing additional params using kwargs and returns created
- object.
- """
- uri = 'jobs'
- body = kwargs.copy()
- body.update({
- 'name': name,
- 'type': job_type,
- 'mains': mains,
- 'libs': libs or list(),
- })
- return self._request_check_and_parse_resp(self.post, uri,
- 202, body=json.dumps(body))
-
- def delete_job(self, job_id):
- """Deletes the specified job by id."""
-
- uri = 'jobs/%s' % job_id
- return self._request_and_check_resp(self.delete, uri, 204)