Merge "Allow 'mac_address' updatable for neutron port"
diff --git a/api/__init__.py b/api/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/api/__init__.py
diff --git a/api/gabbits/environments.yaml b/api/gabbits/environments.yaml
new file mode 100644
index 0000000..17ac476
--- /dev/null
+++ b/api/gabbits/environments.yaml
@@ -0,0 +1,55 @@
+defaults:
+ request_headers:
+ X-Auth-Token: $ENVIRON['OS_TOKEN']
+
+tests:
+- name: environment with parameter
+ POST: /stacks
+ request_headers:
+ content-type: application/json
+ data:
+ files: {}
+ disable_rollback: true
+ parameters: {}
+ stack_name: $ENVIRON['PREFIX']-envstack
+ environment:
+ parameters:
+ test_val: test
+ template:
+ heat_template_version: '2016-04-08'
+ parameters:
+ test_val:
+ type: string
+ resources:
+ test:
+ type: OS::Heat::TestResource
+ properties:
+ value: {get_param: test_val}
+ outputs:
+ output_value:
+ value: {get_attr: [test, output]}
+
+ status: 201
+ response_headers:
+ location: //stacks/$ENVIRON['PREFIX']-envstack/[a-f0-9-]+/
+
+- name: poll for envstack CREATE_COMPLETE
+ GET: $LOCATION
+ redirects: True
+ poll:
+ count: 5
+ delay: 1.0
+ response_json_paths:
+ $.stack.stack_status: CREATE_COMPLETE
+
+- name: get stack output
+ GET: $LAST_URL/outputs/output_value
+ redirects: True
+ status: 200
+ response_json_paths:
+ $.output.output_value: test
+
+- name: delete envstack
+ DELETE: /stacks/$ENVIRON['PREFIX']-envstack
+ redirects: True
+ status: 204
diff --git a/api/gabbits/resources.yaml b/api/gabbits/resources.yaml
new file mode 100644
index 0000000..164f4cb
--- /dev/null
+++ b/api/gabbits/resources.yaml
@@ -0,0 +1,90 @@
+defaults:
+ request_headers:
+ X-Auth-Token: $ENVIRON['OS_TOKEN']
+
+tests:
+- name: create stack with resources
+ POST: /stacks
+ request_headers:
+ content-type: application/json
+ data:
+ files: {}
+ disable_rollback: true
+ parameters: {}
+ stack_name: $ENVIRON['PREFIX']-rsrcstack
+ template:
+ heat_template_version: '2016-04-08'
+ parameters:
+ test_val:
+ type: string
+ default: test
+ resources:
+ test:
+ type: OS::Heat::TestResource
+ properties:
+ value: {get_param: test_val}
+
+ status: 201
+ response_headers:
+ location: //stacks/$ENVIRON['PREFIX']-rsrcstack/[a-f0-9-]+/
+
+- name: poll for rsrcstack CREATE_COMPLETE
+ GET: $LOCATION
+ redirects: True
+ poll:
+ count: 5
+ delay: 1.0
+ response_json_paths:
+ $.stack.stack_status: CREATE_COMPLETE
+
+- name: list resources
+ GET: $LAST_URL/resources
+ request_headers:
+ content-type: application/json
+ status: 200
+ response_json_paths:
+ $.resources[0].logical_resource_id: test
+ $.resources[0].resource_status: CREATE_COMPLETE
+
+- name: list filtered resources
+ GET: $LAST_URL
+ request_headers:
+ content-type: application/json
+ query_parameters:
+ type: OS::Nova::Server
+ status: 200
+ response_json_paths:
+ $.resources: []
+
+- name: show resource
+ GET: $LAST_URL/test
+ request_headers:
+ content-type: application/json
+ status: 200
+ response_json_paths:
+ $.resource.attributes.output: test
+
+- name: mark resource unhealthy
+ PATCH: $LAST_URL
+ request_headers:
+ content-type: application/json
+ data:
+ mark_unhealthy: true
+ resource_status_reason: 'resource deleted'
+ status: 200
+
+- name: show unhealthy resource
+ GET: $LAST_URL
+ status: 200
+ response_json_paths:
+ $.resource.resource_status: CHECK_FAILED
+ $.resource.resource_status_reason: 'resource deleted'
+
+- name: signal resource
+ POST: $LAST_URL/signal
+ status: 200
+
+- name: delete stack with resources
+ DELETE: /stacks/$ENVIRON['PREFIX']-rsrcstack
+ redirects: True
+ status: 204
diff --git a/api/gabbits/resourcetypes.yaml b/api/gabbits/resourcetypes.yaml
new file mode 100644
index 0000000..0730cc8
--- /dev/null
+++ b/api/gabbits/resourcetypes.yaml
@@ -0,0 +1,24 @@
+defaults:
+ request_headers:
+ X-Auth-Token: $ENVIRON['OS_TOKEN']
+
+tests:
+- name: list resource types
+ GET: /resource_types
+ status: 200
+
+- name: show resource type
+ GET: /resource_types/OS::Heat::TestResource
+ status: 200
+ response_json_paths:
+ $.support_status.status: SUPPORTED
+ $.properties.wait_secs.default: 0
+
+- name: resource type template
+ GET: /resource_types/OS::Heat::TestResource/template
+ query_parameters:
+ template_type: hot
+ status: 200
+ response_json_paths:
+ $.resources.TestResource.type: OS::Heat::TestResource
+ $.heat_template_version: '2016-10-14'
diff --git a/api/gabbits/stacks.yaml b/api/gabbits/stacks.yaml
new file mode 100644
index 0000000..3028018
--- /dev/null
+++ b/api/gabbits/stacks.yaml
@@ -0,0 +1,47 @@
+defaults:
+ request_headers:
+ X-Auth-Token: $ENVIRON['OS_TOKEN']
+
+tests:
+- name: stack list
+ GET: /stacks
+ status: 200
+ response_headers:
+ content-type: application/json; charset=UTF-8
+
+- name: create empty stack
+ POST: /stacks
+ request_headers:
+ content-type: application/json
+ data:
+ files: {}
+ disable_rollback: true
+ parameters: {}
+ stack_name: $ENVIRON['PREFIX']-empty
+ environment: {}
+ template:
+ heat_template_version: '2016-04-08'
+
+ status: 201
+ response_headers:
+ location: //stacks/$ENVIRON['PREFIX']-empty/[a-f0-9-]+/
+
+
+- name: poll for empty CREATE_COMPLETE
+ GET: $LOCATION
+ redirects: True
+ poll:
+ count: 5
+ delay: 1.0
+ response_json_paths:
+ $.stack.stack_status: CREATE_COMPLETE
+
+- name: show empty stack
+ GET: $LAST_URL
+ redirects: True
+ status: 200
+
+- name: delete empty stack
+ DELETE: $LAST_URL
+ redirects: True
+ status: 204
diff --git a/api/gabbits/templates.yaml b/api/gabbits/templates.yaml
new file mode 100644
index 0000000..7b67054
--- /dev/null
+++ b/api/gabbits/templates.yaml
@@ -0,0 +1,37 @@
+defaults:
+ request_headers:
+ X-Auth-Token: $ENVIRON['OS_TOKEN']
+
+tests:
+- name: list template versions
+ GET: /template_versions
+ status: 200
+ response_json_paths:
+ $.template_versions[?(@.version='heat_template_version.2017-02-24')].type: hot
+
+- name: list template functions
+ GET: /template_versions/heat_template_version.2016-10-14/functions
+ status: 200
+ response_json_paths:
+ $.template_functions[?(@.functions='get_file')].description:
+ A function for including a file inline.
+
+- name: template validate
+ POST: /validate
+ request_headers:
+ content-type: application/json
+ data:
+ template:
+ heat_template_version: '2016-04-08'
+ parameters:
+ test_val:
+ type: string
+ resources:
+ test:
+ type: OS::Heat::TestResource
+ properties:
+ value: {get_param: test_val}
+ outputs:
+ output_value:
+ value: {get_attr: [test, output]}
+ status: 200
diff --git a/api/test_heat_api.py b/api/test_heat_api.py
new file mode 100644
index 0000000..2e219e7
--- /dev/null
+++ b/api/test_heat_api.py
@@ -0,0 +1,44 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A test module to exercise the Heat API with gabbi. """
+
+import os
+
+from gabbi import driver
+from six.moves.urllib import parse as urlparse
+
+from heat_integrationtests.common import clients
+from heat_integrationtests.common import config
+from heat_integrationtests.common import test
+
+TESTS_DIR = 'gabbits'
+
+
+def load_tests(loader, tests, pattern):
+ """Provide a TestSuite to the discovery process."""
+ test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
+
+ conf = config.CONF.heat_plugin
+ if conf.auth_url is None:
+ # auth_url is not configured, so don't load the API tests
+ return
+ manager = clients.ClientManager(conf)
+ endpoint = manager.identity_client.get_endpoint_url(
+ 'orchestration', conf.region)
+ host = urlparse.urlparse(endpoint).hostname
+ os.environ['OS_TOKEN'] = manager.identity_client.auth_token
+ os.environ['PREFIX'] = test.rand_name('api')
+
+ return driver.build_tests(test_dir, loader, host=host,
+ url=endpoint, test_loader_name=__name__)
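Editor's note on the gabbi wiring above (not part of the patch): load_tests() implements unittest's load_tests protocol, so ordinary test discovery is enough to expand api/gabbits/*.yaml into one test case per entry; the $ENVIRON['OS_TOKEN'] and $ENVIRON['PREFIX'] references in the YAML resolve from the environment variables exported just before driver.build_tests() is called. A rough sketch of driving the suite by hand follows; the module path and runner details are assumptions, not a documented interface:

    # Rough sketch (assumption: heat_integrationtests is importable and the
    # heat_plugin auth_url option is configured; load_tests() returns early
    # otherwise and no gabbi tests are generated).
    import unittest

    loader = unittest.TestLoader()
    # discover() honours the load_tests protocol, so test_heat_api.load_tests()
    # runs and returns the suite built from the YAML files in api/gabbits/.
    suite = loader.discover('heat_integrationtests/api', pattern='test_*.py')
    unittest.TextTestRunner(verbosity=2).run(suite)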
diff --git a/common/clients.py b/common/clients.py
index 96a34b7..9949d3d 100644
--- a/common/clients.py
+++ b/common/clients.py
@@ -123,6 +123,8 @@
password=self._password())
def _get_identity_client(self):
+ user_domain_id = self.conf.user_domain_id
+ project_domain_id = self.conf.project_domain_id
user_domain_name = self.conf.user_domain_name
project_domain_name = self.conf.project_domain_name
kwargs = {
@@ -134,6 +136,8 @@
# keystone v2 can't ignore domain details
if self.auth_version == '3':
kwargs.update({
+ 'user_domain_id': user_domain_id,
+ 'project_domain_id': project_domain_id,
'user_domain_name': user_domain_name,
'project_domain_name': project_domain_name})
auth = password.Password(**kwargs)
@@ -154,7 +158,7 @@
service_type='compute',
endpoint_type='publicURL',
region_name=region,
- no_cache=True,
+ os_cache=False,
insecure=self.insecure,
cacert=self.ca_file,
http_log_debug=True)
diff --git a/common/config.py b/common/config.py
index 3cef5aa..f7b9d6c 100644
--- a/common/config.py
+++ b/common/config.py
@@ -53,6 +53,12 @@
cfg.StrOpt('project_domain_name',
help="Project domain name, if keystone v3 auth_url"
"is used"),
+ cfg.StrOpt('user_domain_id',
+ help="User domain id, if keystone v3 auth_url"
+ "is used"),
+ cfg.StrOpt('project_domain_id',
+ help="Project domain id, if keystone v3 auth_url"
+ "is used"),
cfg.StrOpt('region',
help="The region name to use"),
cfg.StrOpt('instance_type',
@@ -144,7 +150,7 @@
help='Count of retries to edit config file during sighup. If '
'another worker already edit config file, file can be '
'busy, so need to wait and try edit file again.'),
- cfg.StrOpt('heat-config-notify-script',
+ cfg.StrOpt('heat_config_notify_script',
default=('heat-config-notify'),
help="Path to the script heat-config-notify"),
diff --git a/functional/test_cancel_update.py b/functional/test_cancel_update.py
index f6ddc07..bfeeda6 100644
--- a/functional/test_cancel_update.py
+++ b/functional/test_cancel_update.py
@@ -53,7 +53,7 @@
stack_identifier = self.stack_create(template=self.template,
parameters=parameters)
- parameters['InstanceType'] = 'm1.large'
+ parameters['InstanceType'] = self.conf.instance_type
self.update_stack(stack_identifier, self.template,
parameters=parameters,
expected_status='UPDATE_IN_PROGRESS')
diff --git a/functional/test_conditional_exposure.py b/functional/test_conditional_exposure.py
index bf6cc47..69cff79 100644
--- a/functional/test_conditional_exposure.py
+++ b/functional/test_conditional_exposure.py
@@ -23,13 +23,16 @@
unavailable_service = 'Sahara'
unavailable_template = """
heat_template_version: 2015-10-15
+parameters:
+ instance_type:
+ type: string
resources:
not_available:
type: OS::Sahara::NodeGroupTemplate
properties:
plugin_name: fake
hadoop_version: 0.1
- flavor: m1.large
+ flavor: {get_param: instance_type}
node_processes: []
"""
@@ -56,9 +59,11 @@
def test_unavailable_resources_not_created(self):
stack_name = self._stack_rand_name()
+ parameters = {'instance_type': self.conf.minimal_instance_type}
ex = self.assertRaises(exc.HTTPBadRequest,
self.client.stacks.create,
stack_name=stack_name,
+ parameters=parameters,
template=self.unavailable_template)
self.assertIn('ResourceTypeUnavailable', ex.message)
self.assertIn('OS::Sahara::NodeGroupTemplate', ex.message)
diff --git a/functional/test_event_sinks.py b/functional/test_event_sinks.py
index e4a23ff..ea66b7d 100644
--- a/functional/test_event_sinks.py
+++ b/functional/test_event_sinks.py
@@ -43,7 +43,11 @@
'os_username': self.conf.username,
'os_password': self.conf.password,
'os_project_name': self.conf.tenant_name,
- 'os_auth_url': self.conf.auth_url
+ 'os_auth_url': self.conf.auth_url,
+ 'os_user_domain_id': self.conf.user_domain_id,
+ 'os_project_domain_id': self.conf.project_domain_id,
+ 'os_user_domain_name': self.conf.user_domain_name,
+ 'os_project_domain_name': self.conf.project_domain_name
}
}
}
diff --git a/functional/test_heat_autoscaling.py b/functional/test_heat_autoscaling.py
index d41e203..25ceb1c 100644
--- a/functional/test_heat_autoscaling.py
+++ b/functional/test_heat_autoscaling.py
@@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+from heat_integrationtests.common import test
from heat_integrationtests.functional import functional_base
@@ -21,20 +22,38 @@
random_group:
type: OS::Heat::AutoScalingGroup
properties:
- max_size: 10
- min_size: 10
+ cooldown: 0
+ desired_capacity: 3
+ max_size: 5
+ min_size: 2
resource:
type: OS::Heat::RandomString
+ scale_up_policy:
+ type: OS::Heat::ScalingPolicy
+ properties:
+ adjustment_type: change_in_capacity
+ auto_scaling_group_id: { get_resource: random_group }
+ scaling_adjustment: 1
+
+ scale_down_policy:
+ type: OS::Heat::ScalingPolicy
+ properties:
+ adjustment_type: change_in_capacity
+ auto_scaling_group_id: { get_resource: random_group }
+ scaling_adjustment: -1
+
outputs:
all_values:
value: {get_attr: [random_group, outputs_list, value]}
value_0:
value: {get_attr: [random_group, resource.0.value]}
- value_5:
- value: {get_attr: [random_group, resource.5.value]}
- value_9:
- value: {get_attr: [random_group, resource.9.value]}
+ value_1:
+ value: {get_attr: [random_group, resource.1.value]}
+ value_2:
+ value: {get_attr: [random_group, resource.2.value]}
+ asg_size:
+ value: {get_attr: [random_group, current_size]}
'''
template_nested = '''
@@ -44,8 +63,9 @@
random_group:
type: OS::Heat::AutoScalingGroup
properties:
- max_size: 10
- min_size: 10
+ desired_capacity: 3
+ max_size: 5
+ min_size: 2
resource:
type: randomstr.yaml
@@ -54,10 +74,10 @@
value: {get_attr: [random_group, outputs_list, random_str]}
value_0:
value: {get_attr: [random_group, resource.0.random_str]}
- value_5:
- value: {get_attr: [random_group, resource.5.random_str]}
- value_9:
- value: {get_attr: [random_group, resource.9.random_str]}
+ value_1:
+ value: {get_attr: [random_group, resource.1.random_str]}
+ value_2:
+ value: {get_attr: [random_group, resource.2.random_str]}
'''
template_randomstr = '''
@@ -75,14 +95,74 @@
def _assert_output_values(self, stack_id):
stack = self.client.stacks.get(stack_id)
all_values = self._stack_output(stack, 'all_values')
- self.assertEqual(10, len(all_values))
+ self.assertEqual(3, len(all_values))
self.assertEqual(all_values[0], self._stack_output(stack, 'value_0'))
- self.assertEqual(all_values[5], self._stack_output(stack, 'value_5'))
- self.assertEqual(all_values[9], self._stack_output(stack, 'value_9'))
+ self.assertEqual(all_values[1], self._stack_output(stack, 'value_1'))
+ self.assertEqual(all_values[2], self._stack_output(stack, 'value_2'))
+
+ def test_asg_scale_up_max_size(self):
+ stack_id = self.stack_create(template=self.template,
+ expected_status='CREATE_COMPLETE')
+ stack = self.client.stacks.get(stack_id)
+ asg_size = self._stack_output(stack, 'asg_size')
+ # Ensure that initial desired capacity is met
+ self.assertEqual(3, asg_size)
+
+ # send scale up signals and ensure that asg honors max_size
+ asg = self.client.resources.get(stack_id, 'random_group')
+ max_size = 5
+ for num in range(asg_size+1, max_size+2):
+ expected_resources = num if num <= max_size else max_size
+ self.client.resources.signal(stack_id, 'scale_up_policy')
+ test.call_until_true(self.conf.build_timeout,
+ self.conf.build_interval,
+ self.check_autoscale_complete,
+ asg.physical_resource_id, expected_resources)
+
+ def test_asg_scale_down_min_size(self):
+ stack_id = self.stack_create(template=self.template,
+ expected_status='CREATE_COMPLETE')
+ stack = self.client.stacks.get(stack_id)
+ asg_size = self._stack_output(stack, 'asg_size')
+ # Ensure that initial desired capacity is met
+ self.assertEqual(3, asg_size)
+
+ # send scale down signals and ensure that asg honors min_size
+ asg = self.client.resources.get(stack_id, 'random_group')
+ min_size = 2
+ for num in range(asg_size-1, 0, -1):
+ expected_resources = num if num >= min_size else min_size
+ self.client.resources.signal(stack_id, 'scale_down_policy')
+ test.call_until_true(self.conf.build_timeout,
+ self.conf.build_interval,
+ self.check_autoscale_complete,
+ asg.physical_resource_id, expected_resources)
+
+ def test_asg_cooldown(self):
+ cooldown_tmpl = self.template.replace('cooldown: 0',
+ 'cooldown: 10')
+ stack_id = self.stack_create(template=cooldown_tmpl,
+ expected_status='CREATE_COMPLETE')
+ stack = self.client.stacks.get(stack_id)
+ asg_size = self._stack_output(stack, 'asg_size')
+ # Ensure that initial desired capacity is met
+ self.assertEqual(3, asg_size)
+
+ # send scale up signal.
+ # Since cooldown is in effect, number of resources should not change
+ asg = self.client.resources.get(stack_id, 'random_group')
+ expected_resources = 3
+ self.client.resources.signal(stack_id, 'scale_up_policy')
+ test.call_until_true(self.conf.build_timeout,
+ self.conf.build_interval,
+ self.check_autoscale_complete,
+ asg.physical_resource_id, expected_resources)
def test_path_attrs(self):
stack_id = self.stack_create(template=self.template)
- expected_resources = {'random_group': 'OS::Heat::AutoScalingGroup'}
+ expected_resources = {'random_group': 'OS::Heat::AutoScalingGroup',
+ 'scale_up_policy': 'OS::Heat::ScalingPolicy',
+ 'scale_down_policy': 'OS::Heat::ScalingPolicy'}
self.assertEqual(expected_resources, self.list_resources(stack_id))
self._assert_output_values(stack_id)
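Editor's note on the scale-up/scale-down loops above (not part of the patch): each iteration expects the group size to follow the signalled adjustment but to stay clamped to the group's bounds, which is what the `num if num <= max_size else max_size` and `num if num >= min_size else min_size` expressions encode. A standalone sketch of that clamping, using the same bounds as the template (values here are illustrative):

    # Standalone sketch of the expected-size clamping used by the ASG tests.
    def expected_size(requested, min_size=2, max_size=5):
        # the group never grows past max_size or shrinks below min_size
        return max(min_size, min(requested, max_size))

    assert expected_size(4) == 4   # within bounds: honoured as-is
    assert expected_size(6) == 5   # scale-up beyond max_size is capped
    assert expected_size(1) == 2   # scale-down below min_size is capped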
diff --git a/functional/test_reload_on_sighup.py b/functional/test_reload_on_sighup.py
index b014f49..d646581 100644
--- a/functional/test_reload_on_sighup.py
+++ b/functional/test_reload_on_sighup.py
@@ -10,6 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+import re
+import subprocess
import time
import eventlet
@@ -26,6 +28,13 @@
self.config_file = "/etc/heat/heat.conf"
super(ReloadOnSighupTest, self).setUp()
+ def _is_mod_wsgi_daemon(self, service):
+ process = ''.join(['wsgi:', service[:9]]).replace('_', '-')
+ s = subprocess.Popen(["ps", "ax"], stdout=subprocess.PIPE)
+ for x in s.stdout:
+ if re.search(process, x):
+ return True
+
def _set_config_value(self, service, key, value):
config = configparser.ConfigParser()
@@ -116,11 +125,17 @@
# revert all the changes made
self._change_config(service, new_workers, old_workers)
+ def _reload_on_sighup(self, service):
+ if not self._is_mod_wsgi_daemon(service):
+ self._reload(service)
+ else:
+ self.skipTest('Service is running under httpd; skipping test.')
+
def test_api_reload_on_sighup(self):
- self._reload('heat_api')
+ self._reload_on_sighup('heat_api')
def test_api_cfn_reload_on_sighup(self):
- self._reload('heat_api_cfn')
+ self._reload_on_sighup('heat_api_cfn')
def test_api_cloudwatch_on_sighup(self):
- self._reload('heat_api_cloudwatch')
+ self._reload_on_sighup('heat_api_cloudwatch')
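Editor's note on _is_mod_wsgi_daemon() above (not part of the patch): the check builds a ps search pattern of the form 'wsgi:<service prefix>' with underscores replaced by hyphens, on the assumption that this is how the services show up in the process list when run as mod_wsgi daemons under httpd (the exact display names depend on the deployment's WSGIDaemonProcess configuration). A small sketch showing the patterns that expression produces:

    # Sketch: the ps patterns derived by _is_mod_wsgi_daemon() per service name.
    for service in ('heat_api', 'heat_api_cfn', 'heat_api_cloudwatch'):
        pattern = ''.join(['wsgi:', service[:9]]).replace('_', '-')
        print(service, '->', pattern)
    # heat_api -> wsgi:heat-api
    # heat_api_cfn -> wsgi:heat-api-
    # heat_api_cloudwatch -> wsgi:heat-api-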
diff --git a/functional/test_resource_chain.py b/functional/test_resource_chain.py
index 1086e74..2898ebe 100644
--- a/functional/test_resource_chain.py
+++ b/functional/test_resource_chain.py
@@ -58,7 +58,7 @@
# Verify
stack = self.client.stacks.get(stack_id)
- self.assertTrue(stack is not None)
+ self.assertIsNotNone(stack)
# Top-level resource for chain
expected = {'my-chain': 'OS::Heat::ResourceChain'}
@@ -74,15 +74,15 @@
# Outputs
resource_ids = self._stack_output(stack, 'resource-ids')
- self.assertTrue(resource_ids is not None)
+ self.assertIsNotNone(resource_ids)
self.assertEqual(2, len(resource_ids))
resource_value = self._stack_output(stack, 'resource-0-value')
- self.assertTrue(resource_value is not None)
+ self.assertIsNotNone(resource_value)
self.assertEqual(8, len(resource_value)) # from parameter
resource_attrs = self._stack_output(stack, 'all-resource-attrs')
- self.assertTrue(resource_attrs is not None)
+ self.assertIsNotNone(resource_attrs)
self.assertIsInstance(resource_attrs, dict)
self.assertEqual(2, len(resource_attrs))
self.assertEqual(8, len(resource_attrs['0']))
diff --git a/functional/test_software_config.py b/functional/test_software_config.py
index 860d688..815893f 100644
--- a/functional/test_software_config.py
+++ b/functional/test_software_config.py
@@ -149,7 +149,7 @@
iv = dict((i['name'], i['value']) for i in dep['inputs'])
sigurl = iv.get('deploy_signal_id')
requests.post(sigurl, data='{}',
- headers={'content-type': None},
+ headers={'content-type': 'application/json'},
verify=self.verify_cert)
diff --git a/functional/test_stack_events.py b/functional/test_stack_events.py
index 3638fab..d5a7fad 100644
--- a/functional/test_stack_events.py
+++ b/functional/test_stack_events.py
@@ -74,7 +74,7 @@
stack_event.resource_name)
# Resource events are a subset of the original stack event list
- self.assertTrue(len(resource_events) < len(stack_events))
+ self.assertLess(len(resource_events), len(stack_events))
# Get the event details for each resource event
for resource_event in resource_events:
diff --git a/scenario/templates/test_server_signal.yaml b/scenario/templates/test_server_signal.yaml
index dfb1155..4466a5e 100644
--- a/scenario/templates/test_server_signal.yaml
+++ b/scenario/templates/test_server_signal.yaml
@@ -23,6 +23,9 @@
dns_servers:
type: comma_delimited_list
default: ["8.8.8.8", "8.8.4.4"]
+ user_data_format:
+ type: string
+ default: RAW
resources:
sg:
type: OS::Neutron::SecurityGroup
@@ -76,7 +79,7 @@
- subnet: {get_resource: subnet}
security_groups:
- {get_resource: sg}
- user_data_format: RAW
+ user_data_format: {get_param: user_data_format}
user_data:
str_replace:
template: |
diff --git a/scenario/templates/test_volumes_create_from_backup.yaml b/scenario/templates/test_volumes_create_from_backup.yaml
index d6eadd1..ab1edf8 100644
--- a/scenario/templates/test_volumes_create_from_backup.yaml
+++ b/scenario/templates/test_volumes_create_from_backup.yaml
@@ -8,7 +8,6 @@
instance_type:
type: string
description: Type of the instance to be created.
- default: m1.small
image_id:
type: string
diff --git a/scenario/templates/test_volumes_delete_snapshot.yaml b/scenario/templates/test_volumes_delete_snapshot.yaml
index 08f84f1..3893b52 100644
--- a/scenario/templates/test_volumes_delete_snapshot.yaml
+++ b/scenario/templates/test_volumes_delete_snapshot.yaml
@@ -8,7 +8,6 @@
instance_type:
type: string
description: Type of the instance to be created.
- default: m1.small
image_id:
type: string
diff --git a/scenario/test_server_signal.py b/scenario/test_server_signal.py
index 4567e43..b2085e2 100644
--- a/scenario/test_server_signal.py
+++ b/scenario/test_server_signal.py
@@ -20,13 +20,15 @@
class ServerSignalIntegrationTest(scenario_base.ScenarioTestsBase):
"""Test a server in a created network can signal to heat."""
- def test_server_signal(self):
+ def _test_server_signal(self, user_data_format='RAW',
+ image=None):
"""Check a server in a created network can signal to heat."""
parameters = {
'key_name': self.keypair_name,
'flavor': self.conf.minimal_instance_type,
- 'image': self.conf.minimal_image_ref,
+ 'image': image,
'timeout': self.conf.build_timeout,
+ 'user_data_format': user_data_format
}
# Launch stack
@@ -72,3 +74,12 @@
self._log_console_output(servers=[server])
self.fail(
"Timed out waiting for %s to become reachable" % server_ip)
+
+ def test_server_signal_userdata_format_raw(self):
+ self._test_server_signal(image=self.conf.minimal_image_ref)
+
+ def test_server_signal_userdata_format_software_config(self):
+ if not self.conf.image_ref:
+ raise self.skipException("No image configured to test")
+ self._test_server_signal(user_data_format='SOFTWARE_CONFIG',
+ image=self.conf.image_ref)
diff --git a/scenario/test_server_software_config.py b/scenario/test_server_software_config.py
index df35042..f4c7da5 100644
--- a/scenario/test_server_software_config.py
+++ b/scenario/test_server_software_config.py
@@ -100,7 +100,7 @@
res1['result'])
self.assertEqual(0, res1['status_code'])
self.assertEqual('Output to stderr\n', res1['stderr'])
- self.assertTrue(len(res1['stdout']) > 0)
+ self.assertGreater(len(res1['stdout']), 0)
res2 = self._stack_output(stack, 'res2')
self.assertEqual(
@@ -118,7 +118,7 @@
res3['result'])
self.assertEqual(0, res3['status_code'])
self.assertEqual('', res3['stderr'])
- self.assertTrue(len(res1['stdout']) > 0)
+ self.assertGreater(len(res1['stdout']), 0)
dep1_resource = self.client.resources.get(sid, 'dep1')
dep1_id = dep1_resource.physical_resource_id
diff --git a/scenario/test_volumes.py b/scenario/test_volumes.py
index 603c8f2..47e583d 100644
--- a/scenario/test_volumes.py
+++ b/scenario/test_volumes.py
@@ -46,7 +46,7 @@
self.assertEqual(self.volume_description,
self._stack_output(stack, 'display_description'))
- def check_stack(self, stack_id):
+ def check_stack(self, stack_id, parameters):
stack = self.client.stacks.get(stack_id)
# Verify with cinder that the volume exists, with matching details
@@ -75,6 +75,7 @@
try:
stack_identifier2 = self.launch_stack(
template_name='test_volumes_create_from_backup.yaml',
+ parameters=parameters,
add_parameters={'backup_id': backup.id})
stack2 = self.client.stacks.get(stack_identifier2)
except exceptions.StackBuildErrorException:
@@ -125,4 +126,4 @@
)
# Check stack
- self.check_stack(stack_id)
+ self.check_stack(stack_id, parameters)