Merge "fix gnocchi unprocessed measures debug"
diff --git a/ceilometer/tests/integration/gabbi/gabbits-live/aodh-gnocchi-threshold-alarm.yaml b/ceilometer/tests/integration/gabbi/gabbits-live/aodh-gnocchi-threshold-alarm.yaml
index daef65c..65b00e9 100644
--- a/ceilometer/tests/integration/gabbi/gabbits-live/aodh-gnocchi-threshold-alarm.yaml
+++ b/ceilometer/tests/integration/gabbi/gabbits-live/aodh-gnocchi-threshold-alarm.yaml
@@ -216,7 +216,7 @@
data: '"ok"'
status: 200
response_strings:
- "ok"
+ - "ok"
- name: search 'gabbi-gnocchi-threshold-resource-alarm' alarm exist
desc: search for alarm using user_id, project_id, alarm_name
diff --git a/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml b/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
index f0435f3..3670925 100644
--- a/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
+++ b/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
@@ -26,18 +26,6 @@
data: <@create_stack.json
status: 201
- - name: waiting for stack creation
- desc: Wait for the second event on the stack resource, it can be a success or failure
- url: $ENVIRON['HEAT_SERVICE_URL']/stacks/integration_test/events?resource_name=integration_test
- redirects: true
- method: GET
- status: 200
- poll:
- count: 300
- delay: 1
- response_json_paths:
- $.events[1].resource_name: integration_test
-
- name: control stack status
desc: Checks the stack have been created successfully
url: $ENVIRON['HEAT_SERVICE_URL']/stacks/integration_test
@@ -45,7 +33,7 @@
method: GET
status: 200
poll:
- count: 5
+ count: 300
delay: 1
response_json_paths:
$.stack.stack_status: "CREATE_COMPLETE"
@@ -126,18 +114,6 @@
data: <@update_stack.json
status: 202
- - name: waiting for stack update
- desc: Wait for the third event on the stack resource, it can be a success or failure
- url: $ENVIRON['HEAT_SERVICE_URL']/stacks/integration_test/events?resource_name=integration_test
- redirects: true
- method: GET
- status: 200
- poll:
- count: 300
- delay: 1
- response_json_paths:
- $.events[3].resource_name: integration_test
-
- name: control stack status
desc: Checks the stack have been created successfully
url: $ENVIRON['HEAT_SERVICE_URL']/stacks/integration_test
@@ -145,7 +121,7 @@
method: GET
status: 200
poll:
- count: 5
+ count: 300
delay: 1
response_json_paths:
$.stack.stack_status: "UPDATE_COMPLETE"
diff --git a/ceilometer/tests/integration/gabbi/gabbits-live/create_stack.json b/ceilometer/tests/integration/gabbi/gabbits-live/create_stack.json
index 3f2971a..5078d06 100644
--- a/ceilometer/tests/integration/gabbi/gabbits-live/create_stack.json
+++ b/ceilometer/tests/integration/gabbi/gabbits-live/create_stack.json
@@ -13,7 +13,7 @@
"type": "OS::Nova::Server",
"properties": {
"networks": [{ "network": "private" }],
- "flavor": "m1.tiny",
+ "flavor": "$ENVIRON['NOVA_FLAVOR_REF']",
"image": "$ENVIRON['GLANCE_IMAGE_NAME']",
"metadata": {
"metering.server_group": { "get_param": "OS::stack_id" }
diff --git a/ceilometer/tests/integration/gabbi/gabbits-live/update_stack.json b/ceilometer/tests/integration/gabbi/gabbits-live/update_stack.json
index d593b0b..2ba9b53 100644
--- a/ceilometer/tests/integration/gabbi/gabbits-live/update_stack.json
+++ b/ceilometer/tests/integration/gabbi/gabbits-live/update_stack.json
@@ -12,7 +12,7 @@
"type": "OS::Nova::Server",
"properties": {
"networks": [{ "network": "private" }],
- "flavor": "m1.tiny",
+ "flavor": "$ENVIRON['NOVA_FLAVOR_REF']",
"image": "$ENVIRON['GLANCE_IMAGE_NAME']",
"metadata": {
"metering.server_group": { "get_param": "OS::stack_id" }
diff --git a/ceilometer/tests/integration/hooks/post_test_hook.sh b/ceilometer/tests/integration/hooks/post_test_hook.sh
index cd211d3..c16a74d 100755
--- a/ceilometer/tests/integration/hooks/post_test_hook.sh
+++ b/ceilometer/tests/integration/hooks/post_test_hook.sh
@@ -77,48 +77,13 @@
}
-# If we're running in the gate find our keystone endpoint to give to
-# gabbi tests and do a chown. Otherwise the existing environment
-# should provide URL and TOKEN.
-if [ -d $BASE/new/devstack ]; then
- export CEILOMETER_DIR="$BASE/new/ceilometer"
- STACK_USER=stack
- sudo chown -R $STACK_USER:stack $CEILOMETER_DIR
- source $BASE/new/devstack/openrc admin admin
- # Go to the ceilometer dir
- cd $CEILOMETER_DIR
-fi
-
-openstack catalog list
-export AODH_SERVICE_URL=$(openstack catalog show alarming -c endpoints -f value | awk '/public/{print $2}')
-export PANKO_SERVICE_URL=$(openstack catalog show event -c endpoints -f value | awk '/public/{print $2}')
-export GNOCCHI_SERVICE_URL=$(openstack catalog show metric -c endpoints -f value | awk '/public/{print $2}')
-export HEAT_SERVICE_URL=$(openstack catalog show orchestration -c endpoints -f value | awk '/public/{print $2}')
-export NOVA_SERVICE_URL=$(openstack catalog show compute -c endpoints -f value | awk '/public/{print $2}')
-export GLANCE_IMAGE_NAME=$(openstack image list | awk '/ cirros.* /{print $4; exit}')
-export ADMIN_TOKEN=$(openstack token issue -c id -f value)
-export OS_AUTH_TYPE=password
-
-# Run tests with gabbi
-echo "Running telemetry integration test suite"
-set +e
-sudo -E -H -u ${STACK_USER:-${USER}} tox -eintegration
+# Run tests with tempest
+sudo chown -R tempest:stack $BASE/new/tempest
+sudo chown -R tempest:stack $BASE/data/tempest
+cd $BASE/new/tempest
+sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- ceilometer.tests.tempest.scenario.test_telemetry_integration --concurrency=$TEMPEST_CONCURRENCY
EXIT_CODE=$?
-
-if [ -d $BASE/new/devstack ]; then
- export_subunit_data "integration"
- generate_reports_and_maybe_exit $EXIT_CODE
-
- # NOTE(sileht): on swift job permissions are wrong, I don't known why
- sudo chown -R tempest:stack $BASE/new/tempest
- sudo chown -R tempest:stack $BASE/data/tempest
-
- # Run tests with tempest
- cd $BASE/new/tempest
- sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- ceilometer.tests.tempest.scenario.test_autoscaling --concurrency=$TEMPEST_CONCURRENCY
- EXIT_CODE=$?
- export_subunit_data "all-plugin"
- generate_reports_and_maybe_exit $EXIT_CODE
-fi
+export_subunit_data "all-plugin"
+generate_reports_and_maybe_exit $EXIT_CODE
exit $EXIT_CODE
diff --git a/ceilometer/tests/tempest/scenario/test_autoscaling.py b/ceilometer/tests/tempest/scenario/test_autoscaling.py
deleted file mode 100644
index 38cc153..0000000
--- a/ceilometer/tests/tempest/scenario/test_autoscaling.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-import unittest
-
-from gabbi import driver
-from tempest import config
-
-from ceilometer.tests.tempest.scenario import manager
-
-
-class TestAutoscalingGabbi(manager.ScenarioTest):
- credentials = ['admin', 'primary']
-
- @classmethod
- def skip_checks(cls):
- super(TestAutoscalingGabbi, cls).skip_checks()
- for name in ["aodh_plugin", "gnocchi", "nova", "heat", "panko",
- "ceilometer", "glance"]:
- cls._check_service(name)
-
- @classmethod
- def _check_service(cls, name):
- if not getattr(config.CONF.service_available, name, False):
- raise cls.skipException("%s support is required" %
- name.capitalize())
-
- @classmethod
- def resource_setup(cls):
- super(TestAutoscalingGabbi, cls).resource_setup()
- test_dir = os.path.join(os.path.dirname(__file__), '..', '..',
- 'integration', 'gabbi', 'gabbits-live')
- cls.tests = driver.build_tests(
- test_dir, unittest.TestLoader(),
- host='localhost', port='13245',
- test_loader_name='tempest.scenario.telemetry-autoscaling.test')
-
- auth = cls.os_admin.auth_provider.get_auth()
- os.environ["ADMIN_TOKEN"] = auth[0]
- os.environ["AODH_SERVICE_URL"] = cls._get_endpoint_for(
- auth, "alarming_plugin")
- os.environ["GNOCCHI_SERVICE_URL"] = cls._get_endpoint_for(
- auth, "metric")
- os.environ["PANKO_SERVICE_URL"] = cls._get_endpoint_for(
- auth, "event")
- os.environ["HEAT_SERVICE_URL"] = cls._get_endpoint_for(
- auth, "orchestration")
- os.environ["NOVA_SERVICE_URL"] = cls._get_endpoint_for(auth, "compute")
- os.environ["GLANCE_SERVICE_URL"] = cls._get_endpoint_for(auth, "image")
-
- @staticmethod
- def clear_credentials():
- # FIXME(sileht): We don't want the token to be invalided, but
- # for some obcurs reason, clear_credentials is called before/during run
- # So, make the one used by tearDropClass a dump, and call it manually
- # in run()
- pass
-
- def run(self, result=None):
- self.setUp()
- os.environ["GLANCE_IMAGE_NAME"] = self.glance_image_create()
- try:
- self.tests.run(result)
- finally:
- super(TestAutoscalingGabbi, self).clear_credentials()
- self.tearDown()
-
- @staticmethod
- def _get_endpoint_for(auth, service):
- opt_section = getattr(config.CONF, service)
- endpoint_type = opt_section.endpoint_type
- is_keystone_v3 = 'catalog' in auth[1]
-
- if is_keystone_v3:
- if endpoint_type.endswith("URL"):
- endpoint_type = endpoint_type[:-3]
- catalog = auth[1]['catalog']
- endpoints = [e['endpoints'] for e in catalog
- if e['type'] == opt_section.catalog_type]
- if not endpoints:
- raise Exception("%s endpoint not found" %
- config.CONF.metric.catalog_type)
- endpoints = [e['url'] for e in endpoints[0]
- if e['interface'] == endpoint_type]
- if not endpoints:
- raise Exception("%s interface not found for endpoint %s" %
- (endpoint_type,
- config.CONF.metric.catalog_type))
- return endpoints[0]
-
- else:
- if not endpoint_type.endswith("URL"):
- endpoint_type += "URL"
- catalog = auth[1]['serviceCatalog']
- endpoints = [e for e in catalog
- if e['type'] == opt_section.catalog_type]
- if not endpoints:
- raise Exception("%s endpoint not found" %
- config.CONF.metric.catalog_type)
- return endpoints[0]['endpoints'][0][endpoint_type]
-
- @staticmethod
- def test_fake():
- # NOTE(sileht): A fake test is needed to have the class loaded
- # by the test runner
- pass
diff --git a/ceilometer/tests/tempest/scenario/test_telemetry_integration.py b/ceilometer/tests/tempest/scenario/test_telemetry_integration.py
new file mode 100644
index 0000000..0afef2f
--- /dev/null
+++ b/ceilometer/tests/tempest/scenario/test_telemetry_integration.py
@@ -0,0 +1,135 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import unittest
+
+from gabbi import runner
+from gabbi import suitemaker
+from gabbi import utils
+from tempest import config
+
+from ceilometer.tests.tempest.scenario import manager
+
+TEST_DIR = os.path.join(os.path.dirname(__file__), '..', '..',
+ 'integration', 'gabbi', 'gabbits-live')
+
+
+class TestTelemetryIntegration(manager.ScenarioTest):
+ credentials = ['admin', 'primary']
+
+ @classmethod
+ def skip_checks(cls):
+ super(TestTelemetryIntegration, cls).skip_checks()
+ for name in ["aodh_plugin", "gnocchi", "nova", "heat", "panko",
+ "ceilometer", "glance"]:
+ cls._check_service(name)
+
+ @classmethod
+ def _check_service(cls, name):
+ if not getattr(config.CONF.service_available, name, False):
+ raise cls.skipException("%s support is required" %
+ name.capitalize())
+
+ @staticmethod
+ def _get_endpoint(auth, service):
+ opt_section = getattr(config.CONF, service)
+ endpoint_type = opt_section.endpoint_type
+ is_keystone_v3 = 'catalog' in auth[1]
+
+ if is_keystone_v3:
+ if endpoint_type.endswith("URL"):
+ endpoint_type = endpoint_type[:-3]
+ catalog = auth[1]['catalog']
+ endpoints = [e['endpoints'] for e in catalog
+ if e['type'] == opt_section.catalog_type]
+ if not endpoints:
+ raise Exception("%s endpoint not found" %
+ opt_section.catalog_type)
+ endpoints = [e['url'] for e in endpoints[0]
+ if e['interface'] == endpoint_type]
+ if not endpoints:
+ raise Exception("%s interface not found for endpoint %s" %
+ (endpoint_type,
+ opt_section.catalog_type))
+ return endpoints[0]
+
+ else:
+ if not endpoint_type.endswith("URL"):
+ endpoint_type += "URL"
+ catalog = auth[1]['serviceCatalog']
+ endpoints = [e for e in catalog
+ if e['type'] == opt_section.catalog_type]
+ if not endpoints:
+ raise Exception("%s endpoint not found" %
+ opt_section.catalog_type)
+ return endpoints[0]['endpoints'][0][endpoint_type]
+
+ def _do_test(self, filename):
+ auth = self.os_admin.auth_provider.get_auth()
+
+ os.environ.update({
+ "ADMIN_TOKEN": auth[0],
+ "AODH_SERVICE_URL": self._get_endpoint(auth, "alarming_plugin"),
+ "GNOCCHI_SERVICE_URL": self._get_endpoint(auth, "metric"),
+ "PANKO_SERVICE_URL": self._get_endpoint(auth, "event"),
+ "HEAT_SERVICE_URL": self._get_endpoint(auth, "orchestration"),
+ "NOVA_SERVICE_URL": self._get_endpoint(auth, "compute"),
+ "GLANCE_SERVICE_URL": self._get_endpoint(auth, "image"),
+ "GLANCE_IMAGE_NAME": self.glance_image_create(),
+ "NOVA_FLAVOR_REF": config.CONF.compute.flavor_ref,
+ })
+
+ with open(os.path.join(TEST_DIR, filename)) as f:
+ test_suite = suitemaker.test_suite_from_dict(
+ loader=unittest.defaultTestLoader,
+ test_base_name="gabbi",
+ suite_dict=utils.load_yaml(f),
+ test_directory=TEST_DIR,
+ host=None, port=None,
+ fixture_module=None,
+ intercept=None,
+ handlers=runner.initialize_handlers([]),
+ test_loader_name="tempest")
+
+ # NOTE(sileht): We hide stdout/stderr and reraise the failure
+ # manually, tempest will print it itself.
+ with open(os.devnull, 'w') as stream:
+ result = unittest.TextTestRunner(
+ stream=stream, verbosity=0, failfast=True,
+ ).run(test_suite)
+
+ if not result.wasSuccessful():
+ # NOTE: unexpectedSuccesses holds bare tests, not (test, bt) pairs
+ failures = result.errors + result.failures
+ if failures:
+ test, bt = failures[0]
+ name = test.test_data.get('name', test.id())
+ msg = 'From test "%s" :\n%s' % (name, bt)
+ self.fail(msg)
+
+ self.assertTrue(result.wasSuccessful())
+
+
+def test_maker(name, filename):
+ def test(self):
+ self._do_test(filename)
+ test.__name__ = name
+ return test
+
+# Create one scenario per yaml file
+for filename in os.listdir(TEST_DIR):
+ if not filename.endswith('.yaml'):
+ continue
+ name = "test_%s" % filename[:-5].lower().replace("-", "_")
+ setattr(TestTelemetryIntegration, name,
+ test_maker(name, filename))