Merge "Test suite for Aodh's gnocchi-resource-threshold alarm."
diff --git a/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml b/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
index 437d35d..52c3cde 100644
--- a/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
+++ b/ceilometer/tests/integration/gabbi/gabbits-live/autoscaling.yaml
@@ -156,7 +156,7 @@
       redirects: true
       method: GET
       poll:
-          count: 240
+          count: 300
           delay: 1
       status: 404
 
diff --git a/ceilometer/tests/integration/hooks/post_test_hook.sh b/ceilometer/tests/integration/hooks/post_test_hook.sh
index e19cafa..3663419 100755
--- a/ceilometer/tests/integration/hooks/post_test_hook.sh
+++ b/ceilometer/tests/integration/hooks/post_test_hook.sh
@@ -30,58 +30,12 @@
     sudo chmod a+r $BASE/logs/testrepository.subunit.gz $BASE/logs/testr_results.html.gz
 }
 
-# If we're running in the gate find our keystone endpoint to give to
-# gabbi tests and do a chown. Otherwise the existing environment
-# should provide URL and TOKEN.
-if [ -d $BASE/new/devstack ]; then
-    export CEILOMETER_DIR="$BASE/new/ceilometer"
-    STACK_USER=stack
-    sudo chown -R $STACK_USER:stack $CEILOMETER_DIR
-    source $BASE/new/devstack/openrc admin admin
-    # Go to the ceilometer dir
-    cd $CEILOMETER_DIR
-fi
-
-openstack catalog list
-export AODH_SERVICE_URL=$(openstack catalog show alarming -c endpoints -f value | awk '/public/{print $2}')
-export GNOCCHI_SERVICE_URL=$(openstack catalog show metric -c endpoints -f value | awk '/public/{print $2}')
-export HEAT_SERVICE_URL=$(openstack catalog show orchestration -c endpoints -f value | awk '/public/{print $2}')
-export NOVA_SERVICE_URL=$(openstack catalog show compute -c endpoints -f value | awk '/public/{print $2}')
-export GLANCE_IMAGE_NAME=$(openstack image list | awk '/ cirros.*uec /{print $4}')
-export ADMIN_TOKEN=$(openstack token issue -c id -f value)
-
-if [ -d $BASE/new/devstack ]; then
-    # NOTE(sileht): on swift job permissions are wrong, I don't known why
-    sudo chown -R tempest:stack $BASE/new/tempest
-    sudo chown -R tempest:stack $BASE/data/tempest
-
-    # Run tests with tempest
-    cd $BASE/new/tempest
-    set +e
-    sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- ceilometer.tests.tempest.scenario.test_autoscaling --concurrency=$TEMPEST_CONCURRENCY
-    TEMPEST_EXIT_CODE=$?
-    set -e
-    export_subunit_data "all-plugin"
-    if [[ $TEMPEST_EXIT_CODE != 0 ]]; then
-        # Collect and parse result
-        generate_testr_results
-        exit $TEMPEST_EXIT_CODE
-    fi
-
-    cd $CEILOMETER_DIR
-fi
-
-# Run tests with gabbi
-echo "Running telemetry integration test suite"
-set +e
-sudo -E -H -u ${STACK_USER:-${USER}} tox -eintegration
-EXIT_CODE=$?
-
-echo "* Message queue status:"
-sudo rabbitmqctl list_queues | grep -e \\.sample -e \\.info
-
-if [ $EXIT_CODE -ne 0 ] ; then
+function generate_telemetry_report(){
     set +x
+
+    echo "* Message queue status:"
+    sudo rabbitmqctl list_queues | grep -e \\.sample -e \\.info
+
     echo "* Heat stack:"
     openstack stack show integration_test
     echo "* Alarm list:"
@@ -108,13 +62,59 @@
     sudo find $GNOCCHI_DATA_DIR/measure
 
     set -x
+}
+
+function generate_reports_and_maybe_exit() {
+    local ret="$1"
+    if [[ $ret != 0 ]]; then
+        # Collect and parse result
+        generate_telemetry_report
+        generate_testr_results
+        exit $ret
+    fi
+}
+
+
+# If we're running in the gate find our keystone endpoint to give to
+# gabbi tests and do a chown. Otherwise the existing environment
+# should provide URL and TOKEN.
+if [ -d $BASE/new/devstack ]; then
+    export CEILOMETER_DIR="$BASE/new/ceilometer"
+    STACK_USER=stack
+    sudo chown -R $STACK_USER:stack $CEILOMETER_DIR
+    source $BASE/new/devstack/openrc admin admin
+    # Go to the ceilometer dir
+    cd $CEILOMETER_DIR
 fi
 
-set -e
+openstack catalog list
+export AODH_SERVICE_URL=$(openstack catalog show alarming -c endpoints -f value | awk '/public/{print $2}')
+export GNOCCHI_SERVICE_URL=$(openstack catalog show metric -c endpoints -f value | awk '/public/{print $2}')
+export HEAT_SERVICE_URL=$(openstack catalog show orchestration -c endpoints -f value | awk '/public/{print $2}')
+export NOVA_SERVICE_URL=$(openstack catalog show compute -c endpoints -f value | awk '/public/{print $2}')
+export GLANCE_IMAGE_NAME=$(openstack image list | awk '/ cirros.*uec /{print $4}')
+export ADMIN_TOKEN=$(openstack token issue -c id -f value)
 
-# Collect and parse result
-if [ -n "$CEILOMETER_DIR" ]; then
+# Run tests with gabbi
+echo "Running telemetry integration test suite"
+set +e
+sudo -E -H -u ${STACK_USER:-${USER}} tox -eintegration
+EXIT_CODE=$?
+
+if [ -d $BASE/new/devstack ]; then
     export_subunit_data "integration"
-    generate_testr_results
+    generate_reports_and_maybe_exit $EXIT_CODE
+
+    # NOTE(sileht): on swift job permissions are wrong, I don't know why
+    sudo chown -R tempest:stack $BASE/new/tempest
+    sudo chown -R tempest:stack $BASE/data/tempest
+
+    # Run tests with tempest
+    cd $BASE/new/tempest
+    sudo -H -u tempest OS_TEST_TIMEOUT=$TEMPEST_OS_TEST_TIMEOUT tox -eall-plugin -- ceilometer.tests.tempest.scenario.test_autoscaling --concurrency=$TEMPEST_CONCURRENCY
+    EXIT_CODE=$?
+    export_subunit_data "all-plugin"
+    generate_reports_and_maybe_exit $EXIT_CODE
 fi
+
 exit $EXIT_CODE
diff --git a/ceilometer/tests/tempest/scenario/test_autoscaling.py b/ceilometer/tests/tempest/scenario/test_autoscaling.py
index f609363..3b0217b 100644
--- a/ceilometer/tests/tempest/scenario/test_autoscaling.py
+++ b/ceilometer/tests/tempest/scenario/test_autoscaling.py
@@ -92,15 +92,35 @@
     def _get_endpoint_for(auth, service):
         opt_section = getattr(config.CONF, service)
         endpoint_type = opt_section.endpoint_type
-        if not endpoint_type.endswith("URL"):
-            endpoint_type += "URL"
+        is_keystone_v3 = 'catalog' in auth[1]
 
-        endpoints = [e for e in auth[1]['serviceCatalog']
-                     if e['type'] == opt_section.catalog_type]
-        if not endpoints:
-            raise Exception("%s endpoint not found" %
-                            config.CONF.metric.catalog_type)
-        return endpoints[0]['endpoints'][0][endpoint_type]
+        if is_keystone_v3:
+            if endpoint_type.endswith("URL"):
+                endpoint_type = endpoint_type[:-3]
+            catalog = auth[1]['catalog']
+            endpoints = [e['endpoints'] for e in catalog
+                         if e['type'] == opt_section.catalog_type]
+            if not endpoints:
+                raise Exception("%s endpoint not found" %
+                                config.CONF.metric.catalog_type)
+            endpoints = [e['url'] for e in endpoints[0]
+                         if e['interface'] == endpoint_type]
+            if not endpoints:
+                raise Exception("%s interface not found for endpoint %s" %
+                                (endpoint_type,
+                                 config.CONF.metric.catalog_type))
+            return endpoints[0]
+
+        else:
+            if not endpoint_type.endswith("URL"):
+                endpoint_type += "URL"
+            catalog = auth[1]['serviceCatalog']
+            endpoints = [e for e in catalog
+                         if e['type'] == opt_section.catalog_type]
+            if not endpoints:
+                raise Exception("%s endpoint not found" %
+                                config.CONF.metric.catalog_type)
+            return endpoints[0]['endpoints'][0][endpoint_type]
 
     @staticmethod
     def test_fake():